diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml
new file mode 100644
index 00000000..04900d71
--- /dev/null
+++ b/.github/workflows/docker.yml
@@ -0,0 +1,20 @@
+name: Docker build
+
+on:
+  push:
+    branches-ignore:
+      - "develop"
+      - "master"
+      - "release/*"
+  pull_request:
+
+jobs:
+  jpo-cvdp:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v2
+      - name: Build
+        uses: docker/build-push-action@v3
diff --git a/.github/workflows/dockerhub.yml b/.github/workflows/dockerhub.yml
new file mode 100644
index 00000000..32d430c3
--- /dev/null
+++ b/.github/workflows/dockerhub.yml
@@ -0,0 +1,26 @@
+name: "DockerHub Build and Push"
+
+on:
+  push:
+    branches:
+      - "develop"
+      - "master"
+      - "release/*"
+jobs:
+  dockerhub-Jpo-cvdp:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v2
+      - name: Login to DockerHub
+        uses: docker/login-action@v2
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: Build
+        uses: docker/build-push-action@v3
+        with:
+          push: true
+          tags: usdotjpoode/jpo-cvdp:${{ github.ref_name }}
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 5cbba6bb..eed946a6 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -1,20 +1,25 @@
+# Minimum required version of CMake
 cmake_minimum_required(VERSION 2.6)
+# Project name
 project(ppm)
 
-# need to set these prior to setting any targets.
+# Set C++ standard to 11 and make it required
 set(CMAKE_CXX_STANDARD 11)
 set(CMAKE_CXX_STANDARD_REQUIRED ON)
 
+# Optimization flags
 set(CMAKE_CXX_FLAGS "-O3")
 set(CMAKE_C_FLAGS "-O3")
 
+# Set options for macOS
 if (${APPLE})
     set(CMAKE_CXX_EXTENSIONS OFF)
     set(MACPORTS_DIR "/opt")
 endif ()
 
-if(CMAKE_COMPILER_IS_GNUCXX) # add coverage compiler option
+# Add coverage compiler option for GNU C++
+if(CMAKE_COMPILER_IS_GNUCXX)
     set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} --coverage")
     set(CMAKE_CXX_OUTPUT_EXTENSION_REPLACE 1)
 endif()
 
@@ -32,53 +37,73 @@ include_directories("${CMAKE_CURRENT_SOURCE_DIR}/include/spdlog")
 include_directories("${CMAKE_CURRENT_SOURCE_DIR}/include/general-redaction")
 include_directories("/usr/local/include")
 
+# Add include directory for macOS
 if (${APPLE})
-    include_directories( "${MACPORTS_DIR}/local/include")
+    include_directories("${MACPORTS_DIR}/local/include")
     link_directories("${MACPORTS_DIR}/local/lib" "/usr/lib" "/usr/local/lib")
 endif ()
 
-#### BUILD TARGET FOR THE PPM ####
-
-set(PPM_SRC "src/general-redaction/redactionPropertiesManager.cpp"
-    "src/general-redaction/rapidjsonRedactor.cpp"
-    "src/bsm.cpp"
-    "src/bsmHandler.cpp"
-    "src/idRedactor.cpp"
-    "src/ppm.cpp"
-    "src/tool.cpp"
-    "src/velocityFilter.cpp"
-    "src/ppmLogger.cpp"
-    )
-
-add_executable(ppm ${PPM_SRC})
-target_link_libraries(ppm pthread CVLib rdkafka++)
-
-#### BUILD TARGET FOR THE PPM UNIT TESTS AND CODE COVERAGE ####
-
-set(PPM_TEST_SRC "src/tests.cpp")  # unit tests
-
+#### Build target for the PPM
+# List all the source files in the project
+set(SOURCES
+    "src/general-redaction/redactionPropertiesManager.cpp"
+    "src/general-redaction/rapidjsonRedactor.cpp"
+    "src/bsm.cpp"
+    "src/bsmHandler.cpp"
+    "src/idRedactor.cpp"
+    "src/tool.cpp"
+    "src/velocityFilter.cpp"
+    "src/ppmLogger.cpp"
+)
+
+# Create a library target for the shared sources
+add_library(ppm-lib STATIC ${SOURCES})
+
+# Link the library target with the Kafka libraries
+target_link_libraries(ppm-lib PUBLIC
+    rdkafka
+    rdkafka++
+)
+
+#### Create a target for the PPM executable
+add_executable(ppm "src/ppm.cpp")
+
+# Link the PPM executable with the PPM library target
+target_link_libraries(ppm PUBLIC ppm-lib CVLib)
+
+#### Create a target for the Kafka consumer executable
+add_executable(kafka_consumer "src/kafka_consumer.cpp")
+
+# Link the Kafka consumer executable with the PPM library target and the Kafka libraries
+target_link_libraries(kafka_consumer PUBLIC ppm-lib
+    rdkafka
+    rdkafka++
+    CVLib
+)
+
+#### Build target for the PPM unit tests and code coverage
+set(PPM_TEST_SRC "src/tests.cpp")  # unit tests
+
+# Include the Catch header-only test framework
 set(CATCH_INCLUDE_DIR "${CMAKE_CURRENT_SOURCE_DIR}/include/catch")
 add_library(Catch INTERFACE)
-target_include_directories(Catch INTERFACE ${CATCH_INCLUDE_DIR}) # catch is header only; tell where to find header.
+target_include_directories(Catch INTERFACE ${CATCH_INCLUDE_DIR})
 
-
-add_executable(ppm_tests ${PPM_TEST_SRC} ${PPM_SRC}) # need everything to build tests.
+# Build the tests executable
+add_executable(ppm_tests ${PPM_TEST_SRC} ${SOURCES})
 target_link_libraries(ppm_tests pthread CVLib rdkafka++ Catch)
-target_compile_definitions(ppm_tests PRIVATE _PPM_TESTS) # flag to exclude the tool's main.
-
-#### BUILD TARGET FOR THE KAFKA TEST TOOL ####
+target_compile_definitions(ppm_tests PRIVATE _PPM_TESTS)
 
+#### Build target for the Kafka test tool
 add_subdirectory(kafka-test)
 
-# Copy the data to the build. TODO make this part of the test or data target.
+# Copy the data and config directories to the build directory
 set(BSM_DATA_DIR $<TARGET_FILE_DIR:ppm>/unit-test-data)
 set(BSM_CONFIG_DIR $<TARGET_FILE_DIR:ppm>/config)
 
-# Make the base data directory.
+# Make the base data directory and copy the data files
 add_custom_command(TARGET ppm PRE_BUILD COMMAND ${CMAKE_COMMAND} -E make_directory ${BSM_DATA_DIR})
-# Copy the data files.
-add_custom_command(TARGET ppm PRE_BUILD COMMAND echo "Copying the data directory")
 add_custom_command(TARGET ppm PRE_BUILD COMMAND ${CMAKE_COMMAND} -E copy_directory ${PROJECT_SOURCE_DIR}/unit-test-data
     ${BSM_DATA_DIR})
@@ -91,9 +116,3 @@ add_custom_command(TARGET ppm POST_BUILD COMMAND echo "Copying the config direct
 add_custom_command(TARGET ppm POST_BUILD COMMAND ${CMAKE_COMMAND} -E copy_directory ${PROJECT_SOURCE_DIR}/config
     ${BSM_CONFIG_DIR})
-
-# option(BUILD_TESTS "Determines whether to build tests." ON)
-# if(BUILD_TESTS)
-#     enable_testing()
-#     add_test(NAME mytest1 COMMAND ppm_tests)
-# endif()
diff --git a/Dockerfile b/Dockerfile
index 600551cd..d2a13c22 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -4,14 +4,20 @@ USER root
 WORKDIR /cvdi-stream
 
 # Add build tools.
-RUN apt-get update && apt-get install -y g++
+RUN apt-get update && apt-get install -y software-properties-common wget git make gcc-7 g++-7 gcc-7-base && update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-7 100 && update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-7 100
 
 # Install cmake.
 RUN apt install -y libprotobuf-dev protobuf-compiler
 RUN apt install -y cmake
 
 # Install librdkafka.
-RUN apt-get install -y libsasl2-dev libsasl2-modules libssl-dev librdkafka-dev
+RUN apt-get install -y sudo
+RUN wget -qO - https://packages.confluent.io/deb/7.3/archive.key | sudo apt-key add -
+RUN add-apt-repository "deb [arch=amd64] https://packages.confluent.io/deb/7.3 stable main"
+RUN add-apt-repository "deb https://packages.confluent.io/clients/deb $(lsb_release -cs) main"
+RUN apt update
+RUN apt-get install -y libsasl2-modules libsasl2-modules-gssapi-mit libsasl2-dev libssl-dev
+RUN apt install -y librdkafka-dev
 
 # add the source and build files
 ADD CMakeLists.txt /cvdi-stream
diff --git a/Dockerfile-nsv b/Dockerfile-nsv
index 69cd0977..7b8a9338 100644
--- a/Dockerfile-nsv
+++ b/Dockerfile-nsv
@@ -6,14 +6,20 @@ ARG PPM_MAP_FILE
 WORKDIR /cvdi-stream
 
 # Add build tools.
-RUN apt-get update && apt-get install -y g++
+RUN apt-get update && apt-get install -y software-properties-common wget git make gcc-7 g++-7 gcc-7-base && update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-7 100 && update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-7 100
 
 # Install cmake.
 RUN apt install -y libprotobuf-dev protobuf-compiler
 RUN apt install -y cmake
 
 # Install librdkafka.
-RUN apt-get install -y libsasl2-dev libsasl2-modules libssl-dev librdkafka-dev
+RUN apt-get install -y sudo
+RUN wget -qO - https://packages.confluent.io/deb/7.3/archive.key | sudo apt-key add -
+RUN add-apt-repository "deb [arch=amd64] https://packages.confluent.io/deb/7.3 stable main"
+RUN add-apt-repository "deb https://packages.confluent.io/clients/deb $(lsb_release -cs) main"
+RUN apt update
+RUN apt-get install -y libsasl2-modules libsasl2-modules-gssapi-mit libsasl2-dev libssl-dev
+RUN apt install -y librdkafka-dev
 
 # add the source and build files
 ADD CMakeLists.txt /cvdi-stream
diff --git a/Dockerfile.standalone b/Dockerfile.standalone
index 0dcefde7..43753293 100644
--- a/Dockerfile.standalone
+++ b/Dockerfile.standalone
@@ -4,14 +4,20 @@ USER root
 WORKDIR /cvdi-stream
 
 # Add build tools.
-RUN apt-get update && apt-get install -y g++
+RUN apt-get update && apt-get install -y software-properties-common wget git make gcc-7 g++-7 gcc-7-base && update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-7 100 && update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-7 100
 
 # Install cmake.
 RUN apt install -y libprotobuf-dev protobuf-compiler
 RUN apt install -y cmake
 
 # Install librdkafka.
-RUN apt-get install -y libsasl2-dev libsasl2-modules libssl-dev librdkafka-dev
+RUN apt-get install -y sudo
+RUN wget -qO - https://packages.confluent.io/deb/7.3/archive.key | sudo apt-key add -
+RUN add-apt-repository "deb [arch=amd64] https://packages.confluent.io/deb/7.3 stable main"
+RUN add-apt-repository "deb https://packages.confluent.io/clients/deb $(lsb_release -cs) main"
+RUN apt update
+RUN apt-get install -y libsasl2-modules libsasl2-modules-gssapi-mit libsasl2-dev libssl-dev
+RUN apt install -y librdkafka-dev
 
 # add the source and build files
 ADD CMakeLists.txt /cvdi-stream
diff --git a/README.md b/README.md
index acac1db0..191b102b 100644
--- a/README.md
+++ b/README.md
@@ -79,9 +79,9 @@ The documentation is in HTML and is written to the `/jpo-cvdp/docs
 browser.
 
 ## Class Usage Diagram
-![class usage](./docs/diagrams/class-usage/PPM%20Class%20Usage%20With%20Files.drawio.png)
+![class usage](./docs/diagrams/class-usage/PPM%20Class%20Usage.drawio.png)
 
-This diagram shows the usage relationship between the classes of the project. Classes that are in the same file share the same white box. A class that uses another class will have a black arrow pointing to the latter. The PPM class extends the Tool class, and this is shown with a white arrow.
+This diagram shows how the different classes in the project are used. If one class uses another class, a black arrow points from it to the class it uses. The PPM class extends the Tool class; this inheritance relationship is shown with a white arrow.
 
 # Development and Collaboration Tools
@@ -136,8 +136,8 @@ Rather than using a local kafka instance, this project can utilize an instance o
 ## Environment variables
 ### Purpose & Usage
 - The DOCKER_HOST_IP environment variable is used to communicate with the bootstrap server that the instance of Kafka is running on.
-- The KAFKA_TYPE environment variable specifies what type of kafka connection will be attempted and is used to check if Confluent should be utilized.
-- The CONFLUENT_KEY and CONFLUENT_SECRET environment variables are used to authenticate with the bootstrap server.
+- The KAFKA_TYPE environment variable specifies what type of Kafka connection will be attempted and is used to check whether Confluent should be utilized. If this is not set to "CONFLUENT", the PPM will attempt to connect to a local Kafka instance.
+- The CONFLUENT_KEY and CONFLUENT_SECRET environment variables are used to authenticate with the bootstrap server. They are the API key and secret that are generated when a new API key is created in Confluent Cloud, and they are only used if the KAFKA_TYPE environment variable is set to "CONFLUENT".
 
 ### Values
 - DOCKER_HOST_IP must be set to the bootstrap server address (excluding the port)
@@ -182,6 +182,9 @@ When running the project in the provided dev container, the REDACTION_PROPERTIES
 
 #### RPM Debug
 If the RPM_DEBUG environment variable is set to true, debug messages will be logged to a file by the RedactionPropertiesManager class. This will allow developers to see whether the environment variable is set, whether the file was found and whether a non-zero number of redaction fields were loaded in.
+## Build & Exec Script
+The [`build_and_exec.sh`](./build_and_exec.sh) script can be used to build a tagged image of the PPM, run the container, and enter it with an interactive shell. This script can be used to test the PPM in a standalone environment.
+
 ## Some Notes
 - The tests for this project can be run after compilation by running the "ppm_tests" executable.
 - When manually compiling with WSL, librdkafka will sometimes not be recognized. This can be resolved by utilizing the provided dev environment.
diff --git a/build_and_exec.sh b/build_and_exec.sh
new file mode 100644
index 00000000..61b137d1
--- /dev/null
+++ b/build_and_exec.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+# format of tag: 01-01-2020t12.00pm (lowercase t, am, pm)
+tag=$(date +"%m-%d-%Yt%I.%M%p" | tr '[:upper:]' '[:lower:]')
+echo "Building ppm-test-$tag"
+docker build .
-t ppm-test-$tag + +echo "Running ppm-test-$tag" +docker run -it ppm-test-$tag /bin/bash \ No newline at end of file diff --git a/docker-compose-standalone.yml b/docker-compose-standalone.yml index 3b043e31..41992658 100644 --- a/docker-compose-standalone.yml +++ b/docker-compose-standalone.yml @@ -22,6 +22,9 @@ services: dockerfile: Dockerfile environment: DOCKER_HOST_IP: ${DOCKER_HOST_IP} + KAFKA_TYPE: ${KAFKA_TYPE} + CONFLUENT_KEY: ${CONFLUENT_KEY} + CONFLUENT_SECRET: ${CONFLUENT_SECRET} PPM_CONFIG_FILE: ppmBsm.properties REDACTION_PROPERTIES_PATH: ${REDACTION_PROPERTIES_PATH} PPM_LOG_TO_FILE: ${PPM_LOG_TO_FILE} diff --git a/docker-compose.yml b/docker-compose.yml index f5c7aa11..e4d530c1 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -29,9 +29,13 @@ services: volumes: - ${DOCKER_SHARED_VOLUME}:/ppm_data environment: + DOCKER_HOST_IP: ${DOCKER_HOST_IP} + KAFKA_TYPE: ${KAFKA_TYPE} + CONFLUENT_KEY: ${CONFLUENT_KEY} + CONFLUENT_SECRET: ${CONFLUENT_SECRET} + PPM_CONFIG_FILE: ${PPM_CONFIG_FILE} REDACTION_PROPERTIES_PATH: ${REDACTION_PROPERTIES_PATH} PPM_LOG_TO_FILE: ${PPM_LOG_TO_FILE} PPM_LOG_TO_CONSOLE: ${PPM_LOG_TO_CONSOLE} RPM_DEBUG: ${RPM_DEBUG} - PPM_CONFIG_FILE: ${PPM_CONFIG_FILE} PPM_LOG_LEVEL: ${PPM_LOG_LEVEL} \ No newline at end of file diff --git a/docs/diagrams/class-usage/PPM Class Usage With Files.drawio b/docs/diagrams/class-usage/PPM Class Usage With Files.drawio deleted file mode 100644 index b8fada3d..00000000 --- a/docs/diagrams/class-usage/PPM Class Usage With Files.drawio +++ /dev/null @@ -1 +0,0 @@ -7VxdW6M6EP41XnafQqC0l1pb9Rzd07PdU/XKJ0JsUSAIqW33128ooUCSxbqVr9O9kgwJNe9M3plMBk7A0F1fBNBf3GALOSdq11qfgPMTVVWVrk7/RJJNLFH6Sj+WzAPbYrJUMLV/ICbsMunStlCY60gwdojt54Um9jxkkpwMBgFe5bs9YSf/qz6cI0EwNaEjSm9tiyxiaV81UvklsueL5JeV3iC+48KkM5tJuIAWXmVEYHQChgHGJL5y10PkROgluNxebW6d65fexV//hq/wv7O/v3+ddeKHjT8yZDeFAHnktx/tDJdPYK72B4++a8/+6V34AHdAL372G3SWDDDfd7+Yvs/mTDYJkAFeehaKHtY9AWerhU3Q1IdmdHdFbYfKFsR1aEuhl3v+x2xmbyggaJ3RF5vBBcIuIsGGdkkMssuUkZhjj7VXqW4VjckWGb32mQwyc5rvHp1CRi8YanIEf0wfTg1wf4mCzfmdq+E38mzsppAC2EzkOqqWh07TJNDpJUFnTY2v/Tft+cZauSM8W1+tVDoHta3YAXVP7IzSsGuP3fXz2OkDEbvdsq7G7rptwa73PnTKoEroBi1BDih7IFeWn5Ai128JcuqAozq9bjchhijNRG6HSpHNlYWcPLoTl+tj6F5Cz3JQ0Nwgz9jDAstau0QPZ3f62Lp+8CcT9eH2cXx3IYnxRt6S4hguDgPwyXacIXZwsB0LhsORPh6XyYpdLY+sLHpWJcj2ykIWCMheWd+QBU1CQWkXtmreanvdurHVBGxnyMGmTTZj2yGoZfgqPb1h+OoCvmfTm3aB2qFeKs8ISbsKVKU+S3TuyJqjKWvigCzwHHvQGaXSszzGaZ9rjH2G7DMiZMOyU3BJcB53tLbJXTT8C51/3LzP3Dpfs0dvG5uk4dH5xqP0pHmfvZcO27aScR9TZoiXgYkKzJD5eAKDOSIFwLLnRWgWmkaAHEjst3z6TKbm7dDTIICbTAcf2x4JM0+eRILMMuYDc5XLWnH9gV7UnV7E/0Bqb7uZHBA21WqCGfvLmKPcBMs3pSLmq9mUOpzHBYZeaEpCf10v35aMeulsfzaryZS0zzalg5xPv15tGf8H51MU59ftfLidpdZ/x/n0C/vnGUMYrfGJT52LmWLU2CjOcj+BepIUcbXWXJOBfXp0c9ChUIMiiOIAYkcjSo5EUk6pjUakuKqNoJEOfxo0KKYRvn8hjfzG6i+CNL87Zak/wTobvUnlaLTSpJV8f1ALsWY8vvEhl5/wgpLlhZJiQbkjAnuS+KARy1vIPyvFy5uPKrj+5ewrxJT0N+jb1nOIvXbmTzv6HvlTo9JlXo8XT5e5WlFkL529VteaPUxntey3a4L+09MwB0EvHje8wKcXaGIvXLpNPm7sqMLRgsA8u3qCLPMopR3cSiqj0JogzwoFCOkkSR6nkAT4BSXc7WEPcXTORNCx5x5tmhRPGhOCswgy24TOKbvh2pa1XSAyxeRVV5ZueF+sSAqvehLVgNI0I55YLkN0dGrhsviqpJawLLVIN4Z7VHS1zw8oeisdgSIejB7hEukAbldQ4RKRq0WsozpCtXAVlRqoWyvGH61ERaz5CEyTlHxVqxaxXPMI1cK7edAXq0OqVYuY1TxCtSica9kdNNWlFlXc+43W0PUdCvh29zd8FFTU7GSUqucJyRARrrgsSvQTN5uoAnUCqX0Sm1pv0HqUFQN8MSosQOucDvXx5Fx/7ZKZY/qvl+u7Z8lW4ggphiu7UioMk6RKEdPdR6iUjsJVMO3WT23+uCuooIJtdriAfnTnyUHr0+gV4YO23vJyAhb/vV93UFsBk/RdGTFvGJ8GUf8wCbAf2TQKb6AH5207BZZkaSs8IJKCLUY9x0hKfDDarY6TpFppUO2NUXf1bhFH1F1Eo8rtZt/q3aSa7pf9lV+U333WsXxRgVKGEia+e43nLaRb9f2UTGmBuRTbP3Qb6YV7PVeTvLBTFt0WBkBZk5+07DUoPs+l1L7dBy
Kq0VdzmnuIzn+YZbc1qO+lXTGF+51C2FLLLMESaTP9eFHsANNvQIHRTw== \ No newline at end of file diff --git a/docs/diagrams/class-usage/PPM Class Usage With Files.drawio.png b/docs/diagrams/class-usage/PPM Class Usage With Files.drawio.png deleted file mode 100644 index 5846c7c0..00000000 Binary files a/docs/diagrams/class-usage/PPM Class Usage With Files.drawio.png and /dev/null differ diff --git a/docs/diagrams/class-usage/PPM Class Usage.drawio b/docs/diagrams/class-usage/PPM Class Usage.drawio new file mode 100644 index 00000000..64f6f9fa --- /dev/null +++ b/docs/diagrams/class-usage/PPM Class Usage.drawio @@ -0,0 +1 @@ +7VxRd6o4EP41nrP74B5JQPSxWm27t73rvd617VNPKqnSAqEQq95fv0FAIKHU2kJk7VPJkCD5vsnMZDK0Afv26sxD7vyKGNhqgJaxasDTBgBAaWnsTyBZh5K22g0FM880QpGSCMbmbxwJW5F0YRrYz3SkhFjUdLPCKXEcPKUZGfI8ssx2eyBW9lddNMOCYDxFlii9Ng06D6UdoCfyc2zO5vEvK+1ofjaKO0cz8efIIMuUCA4asO8RQsMre9XHVgBejMv1xfraunxqn/39w39G//a+/fo+aYYPG75nyHYKHnbo3o+mmj+50YbG5Z07GoG76/vhzVn86BdkLSK8Bs7CPkf+PJozXcdAemThGDh4WKsBe8u5SfHYRdPg7pKpDpPNqW2xlsIuH0zL6hOLeJuxsN8faEP2Yr0dZxK/FvYoXqV4jGZ2homNqbdmXaK7sKWGQyIthe2ItGXCuRITOU/xHfdDkZrNto9OoGQXEZrvQBYKyF4YP7GBppSBUi9sY9xiC9CSja0qYDvBFpmadD00LYprhq/S1g4MX03Atze+qheoTQCyqMK4XQWqVn/xAGeg0713bXPyT/vMhaQJBASxwVxV1CQenZMZcZA1SKS9LMZJn0tC3AjZR0zpOvK7aEFJFne8MulNMPwvNv+weZu6dbqKHr1prOOGw+YbjtLi5m36XjJs04rHvY9Mnyy8KS5Qw8gRU+TNMC0ANnpegGahanjYQtR8yQYGeTRvhp54HlqnOrjEdKifevIoEKSWsZI1kxrg/DHXH2pF3dlF+AKJvm1nsr8KQqkqmNK/lDrmq2D5qlRk+SSrUpPzuFDXClVJ6K9p5euSLtec7W7NJKmS+tmq9CHn05HLlv5/cD5Fcb5s56NnLYDaecP5dAr7Zy2GMFrlRmsaFzOFqEWjOM39BNOjKDK0WZKCfXp0s6sd+T2+O9Hh7Tlmi+zGVskLfdQPKYIoDiC2ZkTJGJHEpkgzI7m4goMwI02VCyS6xWaE719oRvZY/UWQZnen58gxrLrt/DkzWmnSKn9/IMWwpjy+/i6XfwDRoKLuaMa7B7HAARcnQKV4gfNxBde/nJ2FmJT+iVzTePSJU88MalPbIYOqV7rQ5fjxZKGDvRa60tg9ANh/oS/BuKsub4E21CYX183Tl7U5jg5XZK1zMQTPZo8UPgIP51NaBA6PYPf/ITUoJ5EkBmB81hGouynCuyPDNvc7+huRIX+qwQ0oyXMAwXXgFcWO4Qvqykw3zWqXTz3yhGO/4BAHc64iEiHLnDmsOWUaxSJO2AscgTlF1kl0wzYNY6PweW4ouwjK8ji8n9+GkSmP085xOLAsh6OI56ELHx8dLa+t1gpoyd12tgRWZNj1/W10vq5psoz0x5aIeOx6hEukCbkdR4VLJJ+W9hctCeRxogfKZkX/YoXd7WaDLFWTTUvnixbRzcOOWHtSLS1izvQIaVE417I9xpJFCxAL2QYrZLsWA9zxFzbu3wsUHXaii09pS89oA9FPXK2D+tYRYvpJTaa9Xu1R3uYOq4C5edLXhqNT7blFJ9bUfT5f3Tzm7COO0L5wVThV7rlzSRHz6EdISpPLhGx9szRf3BIYqGCL7c+RG9x5sPDqJPisooSjsCj2e7uiQVpplDHWv3de1McrY2kPyGR1sQST+Og9fdy0OWVivmHkETdQaexfIQfN6na+3AS7lJaXdfCUC7YY8RyjTeID0VZ1+4NcVg6oqkeXXRdcZCNkl+eAfL3ZtS44rtN7/dDmlcK+zzq0KSp9SpmEkWtfklkNzS14Ox1TWlyei+2XuQ146XLLJudToLLMbWEAlFb5Uc0+sOJzXJXuQQvjzxSqvwixagprFTDmVjjIKQHcJxb4WIkv58vLrREpyE1VsO3JfUtwODy/8S1Y7XlWZfIspua+oYcnFGWYSw2xBLO3K/ivu3KFT7IpFX7Wm4tvDQtbyuCFK3Gt8MQrl5UaBr4VsKLokmkRS1z+sJHpMNHUQr7/55ERBLnvZ5ScZZOXntuDH9ZM/lVNuHVP/uEPHPwH \ No newline at end of file diff --git a/docs/diagrams/class-usage/PPM Class Usage.drawio.png b/docs/diagrams/class-usage/PPM Class Usage.drawio.png new file mode 100644 index 00000000..9741bdcd Binary files /dev/null and b/docs/diagrams/class-usage/PPM Class Usage.drawio.png differ diff --git a/include/general-redaction/redactionPropertiesManager.hpp b/include/general-redaction/redactionPropertiesManager.hpp index 9978b7c1..12452d72 100644 --- a/include/general-redaction/redactionPropertiesManager.hpp +++ b/include/general-redaction/redactionPropertiesManager.hpp @@ -69,12 +69,6 @@ class RedactionPropertiesManager { */ void addField(std::string fieldToAdd); - /** - * @brief Prints the fields. For debugging purposes. 
- * - */ - void printFields(); - private: bool debug; std::vector fieldsToRedact; diff --git a/include/kafka_consumer.hpp b/include/kafka_consumer.hpp new file mode 100644 index 00000000..7b1ce4a3 --- /dev/null +++ b/include/kafka_consumer.hpp @@ -0,0 +1,127 @@ +#ifndef CVDP_KAFKA_CONSUMER_H +#define CVDP_KAFKA_CONSUMER_H + +#include "librdkafka/rdkafkacpp.h" +#include +#include + +#ifndef _MSC_VER +#include +#endif + +#ifdef _MSC_VER +#include "../../win32/wingetopt.h" +#include +#elif _AIX +#include +#else +#include +#include +#endif + +#include "bsmHandler.hpp" +#include "ppmLogger.hpp" +#include "cvlib.hpp" + +#include + +/** + * @brief This class is used to consume messages from Kafka + * + */ +class KafkaConsumer { + public: + /** + * @brief Check if topic is available + * + * @param topic The name of the topic + * @param consumer The RdKafka::KafkaConsumer object + * @return true if topic is available + * @return false if topic is not available + */ + bool ode_topic_available(const std::string& topic, std::shared_ptr consumer); + + /** + * @brief Consume message + * + * @param message The RdKafka::Message object to be consumed + * @param opaque The opaque object + * @param handler The BSMHandler object + * @return RdKafka::ErrorCode The error code + */ + RdKafka::ErrorCode msg_consume(RdKafka::Message* message, void* opaque, BSMHandler& handler); + + /** + * @brief Configure Kafka consumer + * + * @param config_file The path to the configuration file + * @param pconf The privacy configuration object + * @param conf The global configuration object + * @param tconf The topic configuration object + * @return true if configuration is successful + * @return false if configuration is not successful + */ + bool configure(const std::string& config_file, std::unordered_map& pconf, RdKafka::Conf *conf, RdKafka::Conf *tconf); + + /** + * @brief Execute Kafka consumer + * + * @param argc The number of arguments + * @param argv The array of arguments + * @return int reflecting the exit status + */ + int execute(int argc, char **argv); + + private: + /** + * @brief Pointer to the PPM logger instance + * + */ + std::shared_ptr logger = std::make_shared("log"); + + /** + * @brief Exit flag + * + */ + bool exit_eof = false; + + /** + * @brief Counter for EOF + * + */ + int eof_cnt = 0; + + /** + * @brief Counter for partition + * + */ + int partition_cnt = 0; + + /** + * @brief Counter for messages + * + */ + long msg_cnt = 0; + + /** + * @brief Number of bytes in messages + * + */ + int64_t msg_bytes = 0; + + /** + * @brief Verbosity level + * + */ + int verbosity = 1; + + /** + * @brief Retrieve environment variable + * + * @param variableName The name of the environment variable + * @return const char* The value of the environment variable + */ + const char* getEnvironmentVariable(const char* variableName); +}; + +#endif // CVDP_KAFKA_CONSUMER_H \ No newline at end of file diff --git a/kafka-test/CMakeLists.txt b/kafka-test/CMakeLists.txt index 6658c33e..f5962711 100644 --- a/kafka-test/CMakeLists.txt +++ b/kafka-test/CMakeLists.txt @@ -32,6 +32,7 @@ # Cyber and Information Security Research (CISR) Group Oak Ridge National # Laboratory # 865-804-5161 (mobile) + cmake_minimum_required(VERSION 2.6) project(CVDIGeofenceTest) diff --git a/src/bsmHandler.cpp b/src/bsmHandler.cpp index 96a4f763..1101a2f7 100644 --- a/src/bsmHandler.cpp +++ b/src/bsmHandler.cpp @@ -60,7 +60,7 @@ BSMHandler::BSMHandler(Quad::Ptr quad_ptr, const ConfigMap& conf, std::shared_pt logger_{ logger } { if (logger_ == nullptr) { 
- std::cout << "BSMHandler::BSMHandler(): Logger is null! Returning." << std::endl; + std::cerr << "BSMHandler::BSMHandler(): Logger is null! Returning." << std::endl; // cannot use logger here because it is null return; } diff --git a/src/general-redaction/redactionPropertiesManager.cpp b/src/general-redaction/redactionPropertiesManager.cpp index 21143100..b7caf2b2 100644 --- a/src/general-redaction/redactionPropertiesManager.cpp +++ b/src/general-redaction/redactionPropertiesManager.cpp @@ -85,18 +85,6 @@ void RedactionPropertiesManager::addField(std::string fieldToAdd) { fieldsToRedact.push_back(fieldToAdd); } -/** - * @brief Prints the fields to the console. For debugging purposes. - * - */ -void RedactionPropertiesManager::printFields() { - logToFile("printing fields"); - std::cout << "=== Fields to Redact ===" << std::endl; - for (std::string field: fieldsToRedact) { - std::cout << field.c_str() << std::endl; - } -} - /** * @brief Logs the message to a file if the debug flag is set to true. * @@ -106,7 +94,7 @@ void RedactionPropertiesManager::logToFile(std::string message) { if (debug) { std::fstream logFile("rpm_log.txt", std::ios_base::app); if (!logFile) { - std::cout << "error opening file" << std::endl; + std::cerr << "error opening file" << std::endl; } logFile << message << std::endl; logFile.close(); diff --git a/src/kafka_consumer.cpp b/src/kafka_consumer.cpp index bb9514fa..6f456fa9 100644 --- a/src/kafka_consumer.cpp +++ b/src/kafka_consumer.cpp @@ -49,56 +49,15 @@ * POSSIBILITY OF SUCH DAMAGE. */ -#include "librdkafka/rdkafkacpp.h" -#include -#include - -#ifndef _MSC_VER -#include -#endif - -#ifdef _MSC_VER -#include "../win32/wingetopt.h" -#include -#elif _AIX -#include -#else -#include -#include -#endif - -#include "bsmHandler.hpp" -#include "ppmLogger.hpp" -#include "cvlib.hpp" - -#include - -static std::shared_ptr logger = std::make_shared("info.log", "error.log"); - -const char* getEnvironmentVariable(const char* variableName) { - const char* toReturn = getenv(variableName); - if (!toReturn) { - logger->error("Something went wrong attempting to retrieve the environment variable " + std::string(variableName)); - toReturn = ""; - } - return toReturn; -} +#include "kafka_consumer.hpp" static bool run = true; -static bool exit_eof = false; -static int eof_cnt = 0; -static int partition_cnt = 0; -static long msg_cnt = 0; -static int64_t msg_bytes = 0; - -static int verbosity = 1; - -static void sigterm (int sig) { - run = false; +static void sigterm(int sig) { + run = false; } -static bool ode_topic_available( const std::string& topic, std::shared_ptr consumer ) { +bool KafkaConsumer::ode_topic_available(const std::string& topic, std::shared_ptr consumer) { bool r = false; RdKafka::Metadata* md; @@ -118,11 +77,7 @@ static bool ode_topic_available( const std::string& topic, std::shared_ptr(message->payload()), message->len()); @@ -136,7 +91,7 @@ RdKafka::ErrorCode msg_consume(RdKafka::Message* message, void* opaque, BSMHandl msg_cnt++; msg_bytes += message->len(); if (verbosity >= 3) { - logger->error("Read msg at offset " + std::to_string(message->offset())); + logger->info("Read msg at offset " + std::to_string(message->offset())); } RdKafka::MessageTimestamp ts; @@ -189,36 +144,7 @@ RdKafka::ErrorCode msg_consume(RdKafka::Message* message, void* opaque, BSMHandl return message->err(); } -/* Use of this partitioner is pretty pointless since no key is provided - * in the produce() call. 
*/ -class MyHashPartitionerCb : public RdKafka::PartitionerCb { - public: - int32_t partitioner_cb (const RdKafka::Topic *topic, const std::string *key, - int32_t partition_cnt, void *msg_opaque) { - return djb_hash(key->c_str(), key->size()) % partition_cnt; - } - private: - - static inline unsigned int djb_hash (const char *str, size_t len) { - unsigned int hash = 5381; - for (size_t i = 0 ; i < len ; i++) - hash = ((hash << 5) + hash) + str[i]; - return hash; - } -}; - -/** - * NOTE: This is supposed to be a little more efficient. - */ -class ExampleConsumeCb : public RdKafka::ConsumeCb { - public: - void consume_cb (RdKafka::Message &msg, void *opaque) { - //msg_consume(&msg, opaque); - } -}; - -bool configure( const std::string& config_file, std::unordered_map& pconf, RdKafka::Conf *conf, RdKafka::Conf *tconf ) { - +bool KafkaConsumer::configure(const std::string& config_file, std::unordered_map& pconf, RdKafka::Conf *conf, RdKafka::Conf *tconf) { std::string line; std::string errstr; std::vector pieces; @@ -243,10 +169,7 @@ bool configure( const std::string& config_file, std::unordered_map pconf; std::string brokers = "localhost"; @@ -265,7 +188,6 @@ int main (int argc, char **argv) { int opt; // command line option. - int use_ccb = 0; // consumer callback use flag. bool do_conf_dump = false; int64_t start_offset = RdKafka::Topic::OFFSET_BEGINNING; @@ -291,7 +213,7 @@ int main (int argc, char **argv) { case 'g': if (conf->set("group.id", optarg, errstr) != RdKafka::Conf::CONF_OK) { - logger->error(errstr); + logger->critical(errstr); exit(1); } break; @@ -395,22 +317,12 @@ int main (int argc, char **argv) { if (!debug.empty()) { if (conf->set("debug", debug, errstr) != RdKafka::Conf::CONF_OK) { - logger->error(errstr); + logger->critical(errstr); exit(1); } } - ExampleConsumeCb ex_consume_cb; - - if(use_ccb) { - conf->set("consume_cb", &ex_consume_cb, errstr); - } - - // ExampleEventCb ex_event_cb; - // conf->set("event_cb", &ex_event_cb, errstr); - if (do_conf_dump) { - //if (true) { // dump the configuration and then exit. // TODO: build a dump method into the conf..? for (int pass = 0 ; pass < 2 ; pass++) { @@ -475,7 +387,7 @@ int main (int argc, char **argv) { if ( search != pconf.end() ) { region_file = search->second; } else { - logger->error("No map file specified."); + logger->critical("No map file specified."); exit(EXIT_FAILURE); } @@ -505,10 +417,8 @@ int main (int argc, char **argv) { } } catch ( std::exception& e ) { - - logger->error(e.what()); + logger->critical(e.what()); exit(0); - } // Declare a quad with the given bounds. 
@@ -532,8 +442,8 @@ int main (int argc, char **argv) { } - } catch ( std::exception& e ) { - logger->error("Problem building geofence: " + std::string(e.what())); + } catch (std::exception& e) { + logger->critical("Problem building geofence: " + std::string(e.what())); delete tconf; delete conf; exit(EXIT_FAILURE); @@ -545,7 +455,7 @@ int main (int argc, char **argv) { std::shared_ptr consumer{RdKafka::KafkaConsumer::create(conf, errstr)}; if (!consumer) { - logger->error("Failed to create consumer: " + errstr); + logger->critical("Failed to create consumer: " + errstr); exit(EXIT_FAILURE); } @@ -556,13 +466,13 @@ int main (int argc, char **argv) { if ( search != pconf.end() ) { topics.push_back( search->second ); } else { - logger->error("Failure to use configured consumer topic: " + errstr); + logger->critical("Failure to use configured consumer topic: " + errstr); exit(EXIT_FAILURE); } for ( const std::string& topic : topics ) { if ( !ode_topic_available( topic, consumer )) { - logger->error("The ODE Topic: " + topic + " is not available. This topic must be readable."); + logger->critical("The ODE Topic: " + topic + " is not available. This topic must be readable."); exit(EXIT_FAILURE); } } @@ -570,14 +480,14 @@ int main (int argc, char **argv) { // subscribe to the J2735BsmJson topic (or test) RdKafka::ErrorCode err = consumer->subscribe(topics); if (err) { - logger->error("Failed to subscribe to " + std::to_string(topics.size()) + " topics: " + RdKafka::err2str(err)); + logger->critical("Failed to subscribe to " + std::to_string(topics.size()) + " topics: " + RdKafka::err2str(err)); exit(EXIT_FAILURE); } // Producer setup: will take the filtered BSMs and send them back to the ODE (or a test java consumer). RdKafka::Producer *producer = RdKafka::Producer::create(conf, errstr); if (!producer) { - logger->error("Failed to create producer: " + errstr); + logger->critical("Failed to create producer: " + errstr); exit(EXIT_FAILURE); } @@ -589,7 +499,7 @@ int main (int argc, char **argv) { if ( search != pconf.end() ) { topic_str = search->second; } else { - logger->error("Topic std::String Empty!"); + logger->critical("Topic std::String Empty!"); exit(EXIT_FAILURE); } } @@ -597,7 +507,7 @@ int main (int argc, char **argv) { // The topic we are publishing filtered BSMs to. RdKafka::Topic *topic = RdKafka::Topic::create(producer, topic_str, tconf, errstr); if (!topic) { - logger->error("Failed to create topic: " + errstr); + logger->critical("Failed to create topic: " + errstr); exit(EXIT_FAILURE); } @@ -608,43 +518,37 @@ int main (int argc, char **argv) { RdKafka::Message *msg = consumer->consume(1000); - if (!use_ccb) { - - status = msg_consume(msg, NULL, handler); - - switch (status) { - case RdKafka::ERR__TIMED_OUT: - break; - - case RdKafka::ERR_NO_ERROR: - { - const BSM& bsm = handler.get_bsm(); - std::stringstream ss; - ss << "Retaining BSM: " << bsm << "\n"; - logger->info(ss.str()); - - // if we still have a message in the handler, we send it back out to the producer we have made above. 
- status = producer->produce(topic, partition, RdKafka::Producer::RK_MSG_COPY, (void *)handler.get_json().c_str(), handler.get_bsm_buffer_size(), NULL, NULL); - if (status != RdKafka::ERR_NO_ERROR) { - logger->error("% Produce failed: " + RdKafka::err2str( status )); - } - } - break; - - case RdKafka::ERR_INVALID_MSG: - { - const BSM& bsm = handler.get_bsm(); - std::stringstream ss; - ss << "Filtering BSM [" << handler.get_result_string() << "] : " << bsm << "\n"; - logger->info(ss.str()); - } - break; - - default: - ; - } + status = msg_consume(msg, NULL, handler); + + switch (status) { + case RdKafka::ERR__TIMED_OUT: + break; + + case RdKafka::ERR_NO_ERROR: + { + const BSM& bsm = handler.get_bsm(); + std::stringstream ss; + ss << "Retaining BSM: " << bsm << "\n"; + logger->info(ss.str()); + + // if we still have a message in the handler, we send it back out to the producer we have made above. + status = producer->produce(topic, partition, RdKafka::Producer::RK_MSG_COPY, (void *)handler.get_json().c_str(), handler.get_bsm_buffer_size(), NULL, NULL); + if (status != RdKafka::ERR_NO_ERROR) { + logger->error("% Produce failed: " + RdKafka::err2str( status )); + } + } + break; + case RdKafka::ERR_INVALID_MSG: + { + const BSM& bsm = handler.get_bsm(); + std::stringstream ss; + ss << "Filtering BSM [" << handler.get_result_string() << "] : " << bsm << "\n"; + logger->info(ss.str()); + } + break; } + delete msg; } @@ -662,15 +566,31 @@ int main (int argc, char **argv) { logger->info(">> Consumed " + std::to_string(msg_cnt) + " messages (" + std::to_string(msg_bytes) + " bytes)"); /* - * Wait for RdKafka to decommission. - * This is not strictly needed (with check outq_len() above), but - * allows RdKafka to clean up all its resources before the application - * exits so that memory profilers such as valgrind wont complain about - * memory leaks. - */ + * Wait for RdKafka to decommission. + * This is not strictly needed (with check outq_len() above), but + * allows RdKafka to clean up all its resources before the application + * exits so that memory profilers such as valgrind wont complain about + * memory leaks. + */ RdKafka::wait_destroyed(5000); return 0; } +const char* KafkaConsumer::getEnvironmentVariable(const char* variableName) { + const char* toReturn = getenv(variableName); + if (!toReturn) { + logger->warn("The '" + std::string(variableName) + "' environment variable was not set."); + toReturn = ""; + } + return toReturn; +} + +#ifndef _PPM_TESTS + +int main(int argc, char **argv) { + KafkaConsumer test; + test.execute(argc, argv); +} + #endif diff --git a/src/ppm.cpp b/src/ppm.cpp index b6ba5c23..c7a26c20 100644 --- a/src/ppm.cpp +++ b/src/ppm.cpp @@ -759,14 +759,13 @@ int PPM::operator()(void) { signal(SIGTERM, sigterm); try { - // throws for mapfile and other items. - if ( !configure() ) return EXIT_FAILURE; - - } catch ( std::exception& e ) { + if (!configure()) { + return EXIT_FAILURE; + } - // don't use logger in case we cannot configure it correctly. 
- std::cerr << "Fatal std::Exception: " << e.what() << std::endl; + } catch (std::exception& e) { + logger->critical("Fatal std::Exception: " + std::string(e.what())); return EXIT_FAILURE; } @@ -793,7 +792,7 @@ int PPM::operator()(void) { RdKafka::ErrorCode err = consumer->position(partitions); if (err) { - logger->info("err " + RdKafka::err2str(err)); + logger->error("err " + RdKafka::err2str(err)); } else { for (auto *partition : partitions) { logger->info("topar " + partition->topic() + " " + std::to_string(partition->offset())); @@ -833,7 +832,7 @@ int PPM::operator()(void) { const char* PPM::getEnvironmentVariable(const char* variableName) { const char* toReturn = getenv(variableName); if (!toReturn) { - logger->error("Something went wrong attempting to retrieve the environment variable " + std::string(variableName)); + logger->warn("The environment variable '" + std::string(variableName) + "' was not set."); toReturn = ""; } return toReturn; @@ -845,36 +844,36 @@ int main( int argc, char* argv[] ) { PPM ppm{"ppm","Privacy Protection Module"}; - ppm.addOption( 'c', "config", "Configuration for Kafka and Privacy Protection Module.", true ); - ppm.addOption( 'C', "config-check", "Check the configuration and output the settings.", false ); - ppm.addOption( 'u', "unfiltered-topic", "The unfiltered consume topic.", true ); - ppm.addOption( 'f', "filtered-topic", "The unfiltered produce topic.", true ); - ppm.addOption( 'p', "partition", "Consumer topic partition from which to read.", true ); - ppm.addOption( 'g', "group", "Consumer group identifier", true ); - ppm.addOption( 'b', "broker", "List of broker addresses (localhost:9092)", true ); - ppm.addOption( 'o', "offset", "Byte offset to start reading in the consumed topic.", true ); - ppm.addOption( 'x', "exit", "Exit consumer when last message in partition has been received.", false ); - ppm.addOption( 'd', "debug", "debug level.", true ); - ppm.addOption( 'm', "mapfile", "Map data file to specify the geofence.", true ); - ppm.addOption( 'v', "log-level", "The info log level [trace,debug,info,warning,error,critical,off]", true ); - ppm.addOption( 'D', "log-dir", "Directory for the log files.", true ); - ppm.addOption( 'R', "log-rm", "Remove specified/default log files if they exist.", false ); - ppm.addOption( 'i', "log", "Log file name.", true ); - ppm.addOption( 'h', "help", "print out some help" ); + ppm.addOption('c', "config", "Configuration for Kafka and Privacy Protection Module.", true); + ppm.addOption('C', "config-check", "Check the configuration and output the settings.", false); + ppm.addOption('u', "unfiltered-topic", "The unfiltered consume topic.", true); + ppm.addOption('f', "filtered-topic", "The unfiltered produce topic.", true); + ppm.addOption('p', "partition", "Consumer topic partition from which to read.", true); + ppm.addOption('g', "group", "Consumer group identifier", true); + ppm.addOption('b', "broker", "List of broker addresses (localhost:9092)", true); + ppm.addOption('o', "offset", "Byte offset to start reading in the consumed topic.", true); + ppm.addOption('x', "exit", "Exit consumer when last message in partition has been received.", false); + ppm.addOption('d', "debug", "debug level.", true); + ppm.addOption('m', "mapfile", "Map data file to specify the geofence.", true); + ppm.addOption('v', "log-level", "The info log level [trace,debug,info,warning,error,critical,off]", true); + ppm.addOption('D', "log-dir", "Directory for the log files.", true); + ppm.addOption('R', "log-rm", "Remove 
specified/default log files if they exist.", false); + ppm.addOption('i', "log", "Log file name.", true); + ppm.addOption('h', "help", "print out some help"); if (!ppm.parseArgs(argc, argv)) { ppm.usage(); - exit( EXIT_FAILURE ); + exit(EXIT_FAILURE); } if (ppm.optIsSet('h')) { ppm.help(); - exit( EXIT_SUCCESS ); + exit(EXIT_SUCCESS); } // can set levels if needed here. - if ( !ppm.make_loggers( ppm.optIsSet('R') )) { - exit( EXIT_FAILURE ); + if (!ppm.make_loggers(ppm.optIsSet('R'))) { + exit(EXIT_FAILURE); } // configuration check. @@ -882,18 +881,18 @@ int main( int argc, char* argv[] ) try { if (ppm.configure()) { ppm.print_configuration(); - exit( EXIT_SUCCESS ); + exit(EXIT_SUCCESS); } else { - ppm.logger->error( "current configuration settings do not work; exiting." ); - exit( EXIT_FAILURE ); + ppm.logger->critical("current configuration settings do not work; exiting."); + exit(EXIT_FAILURE); } - } catch ( std::exception& e ) { - ppm.logger->error( "std::exception: " + std::string( e.what() )); - exit( EXIT_FAILURE ); + } catch (std::exception& e) { + ppm.logger->critical("std::exception: " + std::string(e.what())); + exit(EXIT_FAILURE); } } - exit( ppm.run() ); + exit(ppm.run()); } #endif diff --git a/src/tests.cpp b/src/tests.cpp index f029bd0c..7eba1c03 100644 --- a/src/tests.cpp +++ b/src/tests.cpp @@ -1702,14 +1702,16 @@ TEST_CASE( "BSMHandler JSON General Redaction Only", "[ppm][redaction][generalon rapidjson::ParseResult preliminaryParseResult = doc.Parse(test_case.c_str()); REQUIRE( preliminaryParseResult ); - int numMembersPresentBeforeRedaction = 0; - for (std::string memberPath : rpm.getFields()) { - bool found = handler.getRapidjsonRedactor().searchForMemberByPath(doc, memberPath); - if (found) { - numMembersPresentBeforeRedaction++; + if (rpm.getNumFields() > 0) { + int numMembersPresentBeforeRedaction = 0; + for (std::string memberPath : rpm.getFields()) { + bool found = handler.getRapidjsonRedactor().searchForMemberByPath(doc, memberPath); + if (found) { + numMembersPresentBeforeRedaction++; + } } + REQUIRE( numMembersPresentBeforeRedaction > 0 ); } - REQUIRE( numMembersPresentBeforeRedaction > 0 ); // process test case to build BSM (redaction will occur here) CHECK( handler.process( test_case ) ); @@ -1776,14 +1778,16 @@ TEST_CASE( "BSMHandler JSON General Redaction w/ All Flags", "[ppm][redaction][g rapidjson::ParseResult preliminaryParseResult = doc.Parse(test_case.c_str()); REQUIRE( preliminaryParseResult ); - int numMembersPresentBeforeRedaction = 0; - for (std::string memberPath : rpm.getFields()) { - bool found = handler.getRapidjsonRedactor().searchForMemberByPath(doc, memberPath); - if (found) { - numMembersPresentBeforeRedaction++; + if (rpm.getNumFields() > 0) { + int numMembersPresentBeforeRedaction = 0; + for (std::string memberPath : rpm.getFields()) { + bool found = handler.getRapidjsonRedactor().searchForMemberByPath(doc, memberPath); + if (found) { + numMembersPresentBeforeRedaction++; + } } + REQUIRE( numMembersPresentBeforeRedaction > 0 ); } - REQUIRE( numMembersPresentBeforeRedaction > 0 ); // process test case to build BSM (redaction will occur here) CHECK( handler.process( test_case ) ); diff --git a/src/tool.cpp b/src/tool.cpp index b786f86d..4a3d7a5e 100644 --- a/src/tool.cpp +++ b/src/tool.cpp @@ -281,7 +281,7 @@ bool Tool::parseArgs(int argc, char* argv[]) return false; case 1 : - std::cout << "not sure what this does... case 1. optarg = " << optarg << std::endl; + std::cerr << "Unknown case 1. 
optarg = " << optarg << std::endl; break; case '?' :
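
---

The CMakeLists.txt changes above move the shared sources into a static `ppm-lib` target that the `ppm`, `kafka_consumer`, and `ppm_tests` executables all link against. A minimal out-of-source build sketch, assuming librdkafka and CVLib are already installed where CMake can find them:

mkdir build && cd build
cmake ..
make           # builds ppm-lib once, then links ppm, kafka_consumer, and ppm_tests against it
./ppm_tests    # per the README note above, the unit tests can be run directly after compilation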
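The docker-compose files now pass KAFKA_TYPE, CONFLUENT_KEY, and CONFLUENT_SECRET through to the container. A sample .env file matching the README's variable descriptions; every value here is a placeholder, not a real address or credential:

DOCKER_HOST_IP=192.0.2.10           # bootstrap server address, excluding the port
KAFKA_TYPE=CONFLUENT                # any other value makes the PPM connect to a local Kafka instance
CONFLUENT_KEY=<api-key-from-confluent-cloud>
CONFLUENT_SECRET=<api-secret-from-confluent-cloud>
PPM_CONFIG_FILE=ppmBsm.properties   # as used in docker-compose-standalone.yml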
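Expected usage of build_and_exec.sh, with the tag taken from the script's own example format (the timestamp and shell prompt shown are illustrative):

$ ./build_and_exec.sh
Building ppm-test-01-01-2020t12.00pm
Running ppm-test-01-01-2020t12.00pm
root@<container-id>:/cvdi-stream#   # interactive shell inside the freshly built image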