HPCC-30967 Coverity Scan reported new defects related to Parquet plugin
jackdelv committed Dec 5, 2023
1 parent b330baf commit edc800b
Showing 2 changed files with 6 additions and 6 deletions.
8 changes: 4 additions & 4 deletions plugins/parquet/parquetembed.cpp
@@ -163,7 +163,7 @@ arrow::Status ParquetReader::openReadFile()
failx("Incorrect partitioning type %s.", partOption.c_str());
}
// Create the dataset factory
- PARQUET_ASSIGN_OR_THROW(auto datasetFactory, arrow::dataset::FileSystemDatasetFactory::Make(fs, selector, format, options));
+ PARQUET_ASSIGN_OR_THROW(auto datasetFactory, arrow::dataset::FileSystemDatasetFactory::Make(std::move(fs), std::move(selector), format, std::move(options)));

// Get scanner
PARQUET_ASSIGN_OR_THROW(auto dataset, datasetFactory->Finish());
@@ -372,7 +372,7 @@ arrow::Result<std::shared_ptr<arrow::Table>> ParquetReader::queryRows()
// Convert the current batch to a table
PARQUET_ASSIGN_OR_THROW(auto batch, *rbatchItr);
rbatchItr++;
- std::vector<std::shared_ptr<arrow::RecordBatch>> toTable = {batch};
+ std::vector<std::shared_ptr<arrow::RecordBatch>> toTable = {std::move(batch)};
return std::move(arrow::Table::FromRecordBatches(std::move(toTable)));
}

@@ -457,7 +457,7 @@ arrow::Status ParquetWriter::openWriteFile()
ARROW_ASSIGN_OR_RAISE(auto filesystem, arrow::fs::FileSystemFromUriOrPath(destination));
auto format = std::make_shared<arrow::dataset::ParquetFileFormat>();
writeOptions.file_write_options = format->DefaultWriteOptions();
- writeOptions.filesystem = filesystem;
+ writeOptions.filesystem = std::move(filesystem);
writeOptions.base_dir = destination;
writeOptions.partitioning = partitionType;
writeOptions.existing_data_behavior = arrow::dataset::ExistingDataBehavior::kOverwriteOrIgnore;
@@ -484,7 +484,7 @@ arrow::Status ParquetWriter::openWriteFile()
std::shared_ptr<parquet::ArrowWriterProperties> arrowProps = parquet::ArrowWriterProperties::Builder().store_schema()->build();

// Create a writer
- ARROW_ASSIGN_OR_RAISE(writer, parquet::arrow::FileWriter::Open(*schema.get(), pool, outfile, props, arrowProps));
+ ARROW_ASSIGN_OR_RAISE(writer, parquet::arrow::FileWriter::Open(*schema.get(), pool, outfile, std::move(props), std::move(arrowProps)));
}
return arrow::Status::OK();
}
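The changes in parquetembed.cpp all follow the same pattern: a locally created smart pointer or options object is handed off to an Arrow struct field or factory call and never used again, so it should be moved rather than copied. Below is a minimal sketch of that idiom, mirroring the writeOptions.filesystem change; the types and names are placeholders for illustration, not the plugin's actual Arrow classes.

```cpp
#include <memory>
#include <string>
#include <utility>

// Hypothetical stand-ins for the Arrow objects touched in the diff above.
struct FileSystem {};

struct WriteOptions
{
    std::shared_ptr<FileSystem> filesystem;
    std::string base_dir;
};

WriteOptions buildWriteOptions(const std::string &destination)
{
    auto filesystem = std::make_shared<FileSystem>();

    WriteOptions writeOptions;
    // `filesystem` is not used again below, so assigning a copy here would
    // only add a redundant atomic ref-count increment/decrement pair -- the
    // kind of defect Coverity flags. Moving transfers ownership instead.
    writeOptions.filesystem = std::move(filesystem);
    writeOptions.base_dir = destination;
    return writeOptions;
}
```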
4 changes: 2 additions & 2 deletions plugins/parquet/parquetembed.hpp
@@ -878,7 +878,7 @@ class ParquetRowStream : public RtlCInterface, implements IRowStream
{
public:
ParquetRowStream(IEngineRowAllocator *_resultAllocator, std::shared_ptr<ParquetReader> _parquetReader)
-     : resultAllocator(_resultAllocator), parquetReader(_parquetReader) {}
+     : resultAllocator(_resultAllocator), parquetReader(std::move(_parquetReader)) {}
virtual ~ParquetRowStream() = default;

RTLIMPLEMENT_IINTERFACE
@@ -947,7 +947,7 @@ class ParquetRecordBinder : public CInterfaceOf<IFieldProcessor>
{
public:
ParquetRecordBinder(const IContextLogger &_logctx, const RtlTypeInfo *_typeInfo, int _firstParam, std::shared_ptr<ParquetWriter> _parquetWriter)
-     : logctx(_logctx), typeInfo(_typeInfo), firstParam(_firstParam), dummyField("<row>", NULL, typeInfo), thisParam(_firstParam), parquetWriter(_parquetWriter) {}
+     : logctx(_logctx), typeInfo(_typeInfo), firstParam(_firstParam), dummyField("<row>", NULL, typeInfo), thisParam(_firstParam), parquetWriter(std::move(_parquetWriter)) {}
virtual ~ParquetRecordBinder() = default;
int numFields();
void processRow(const byte *row);
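The header changes apply the sink-parameter idiom: both constructors already take their std::shared_ptr arguments by value, so moving the parameter into the member avoids a second ref-count bump. A short sketch of the pattern, using hypothetical class names rather than the plugin's real ones:

```cpp
#include <memory>
#include <utility>

class ParquetReaderStub {}; // hypothetical stand-in for ParquetReader

class RowStreamSketch
{
public:
    // Sink parameter: take the shared_ptr by value, then move it into the
    // member. Callers passing an rvalue transfer ownership with no extra
    // ref-count traffic; lvalue callers still pay for exactly one copy.
    explicit RowStreamSketch(std::shared_ptr<ParquetReaderStub> reader)
        : reader(std::move(reader)) {}

private:
    std::shared_ptr<ParquetReaderStub> reader;
};

int main()
{
    auto reader = std::make_shared<ParquetReaderStub>();
    RowStreamSketch stream(std::move(reader)); // ownership transferred, no copy
}
```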
