Skip to content

Commit

Permalink
Release 5.12.0
Browse files Browse the repository at this point in the history
  • Loading branch information
APiankouski authored Sep 19, 2024
2 parents cadd4fa + 4e17bd2 commit 4d8830a
Show file tree
Hide file tree
Showing 217 changed files with 6,659 additions and 5,495 deletions.
1 change: 1 addition & 0 deletions .flake8
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
[flake8]
ignore = E741, W503
exclude = .git,venv,env,fixtures
max-line-length = 119
2 changes: 1 addition & 1 deletion .github/workflows/build-dev-image.yml
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4

- name: Create variables
id: vars
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/build-feature-image.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ jobs:
if: (!startsWith(github.head_ref, 'rc/') || !startsWith(github.head_ref, 'hotfix/') || !startsWith(github.head_ref, 'master') || !startsWith(github.head_ref, 'main'))
steps:
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4

- name: Create variables
id: vars
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/build-rc-image.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ jobs:
environment: rc
steps:
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4

- name: Create variables
id: vars
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/dockerhub-release.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ jobs:
if: github.event.pull_request.base.ref == 'master' || github.event.pull_request.base.ref == 'main'
steps:
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4

- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v2
Expand Down
13 changes: 13 additions & 0 deletions .github/workflows/sync-jira-versions.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
name: Add GitHub release version to Jira issues

on:
pull_request:
types: [opened, synchronize, reopened]

jobs:
call-jira-sync:
name: Call Jira versions update
uses: reportportal/.github/.github/workflows/update-jira-versions.yaml@main
with:
jira-server: ${{ vars.JIRA_SERVER }}
secrets: inherit
10 changes: 6 additions & 4 deletions .github/workflows/tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -18,14 +18,15 @@ on: [ push, pull_request ]
jobs:
build:
runs-on: ubuntu-latest
timeout-minutes: 20
steps:
- name: Checkout repository
uses: actions/checkout@v3
uses: actions/checkout@v4

- name: Set up Python
uses: actions/setup-python@v4
uses: actions/setup-python@v5
with:
python-version: '3.10'
python-version: '3.11'

- name: Install dependencies
run: |
Expand All @@ -41,7 +42,8 @@ jobs:
run: pytest --cov-config=.coveragerc --cov --cov-report=xml test/ -s -vv

- name: Upload coverage to Codecov
uses: codecov/codecov-action@v3
uses: codecov/codecov-action@v4
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: coverage.xml
flags: unittests
30 changes: 18 additions & 12 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
FROM --platform=${BUILDPLATFORM} bitnami/python:3.10.14 AS test
RUN apt-get update && apt-get install -y build-essential \
&& rm -rf /var/lib/apt/lists/* \
FROM registry.access.redhat.com/ubi8/python-311:latest AS test
USER root
RUN dnf -y upgrade \
&& python -m venv /venv \
&& mkdir /build
ENV VIRTUAL_ENV=/venv
Expand All @@ -13,17 +13,20 @@ RUN "${VIRTUAL_ENV}/bin/pip" install --upgrade pip \
RUN "${VIRTUAL_ENV}/bin/pip" install --no-cache-dir -r requirements-dev.txt
RUN make test-all


FROM --platform=${BUILDPLATFORM} bitnami/python:3.10.14 AS builder
RUN apt-get update && apt-get install -y build-essential libpcre3 libpcre3-dev \
&& rm -rf /var/lib/apt/lists/* \
FROM registry.access.redhat.com/ubi8/python-311:latest AS builder
USER root
RUN dnf -y upgrade && dnf -y install pcre-devel \
&& dnf -y remove emacs-filesystem libjpeg-turbo libtiff libpng wget \
&& dnf -y autoremove \
&& dnf clean all \
&& python -m venv /venv \
&& mkdir /build
ENV VIRTUAL_ENV=/venv
ENV PATH="${VIRTUAL_ENV}/bin:${PATH}"
WORKDIR /build
COPY ./ ./
RUN "${VIRTUAL_ENV}/bin/pip" install --upgrade pip \
&& "${VIRTUAL_ENV}/bin/pip" install --upgrade setuptools \
&& LIBRARY_PATH=/lib:/usr/lib /bin/sh -c "${VIRTUAL_ENV}/bin/pip install --no-cache-dir -r requirements.txt" \
&& "${VIRTUAL_ENV}/bin/python3" -m nltk.downloader -d /usr/share/nltk_data stopwords
ARG APP_VERSION=""
Expand All @@ -35,15 +38,18 @@ RUN mkdir /backend \
&& cp -r /build/app /backend/ \
&& cp -r /build/res /backend/


FROM --platform=${BUILDPLATFORM} bitnami/python:3.10.14
FROM registry.access.redhat.com/ubi8/python-311:latest
USER root
WORKDIR /backend/
COPY --from=builder /backend ./
COPY --from=builder /venv /venv
COPY --from=builder /usr/share/nltk_data /usr/share/nltk_data/
RUN apt-get update && apt-get -y upgrade \
&& apt-get install -y libxml2 libgomp1 curl libpcre3 libpcre3-dev \
&& rm -rf /var/lib/apt/lists/* \
RUN dnf -y upgrade && dnf -y install pcre-devel \
&& dnf -y remove emacs-filesystem libjpeg-turbo libtiff libpng wget \
&& dnf -y autoremove \
&& dnf clean all \
&& pip install --upgrade pip \
&& pip install --upgrade setuptools \
&& mkdir -p -m 0700 /backend/storage \
&& groupadd uwsgi && useradd -g uwsgi uwsgi \
&& chown -R uwsgi: /usr/share/nltk_data \
Expand Down
8 changes: 4 additions & 4 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

[![Tests](https://github.com/reportportal/service-auto-analyzer/actions/workflows/tests.yml/badge.svg)](https://github.com/reportportal/service-auto-analyzer/actions/workflows/tests.yml)
[![codecov](https://codecov.io/github/reportportal/service-auto-analyzer/branch/master/graph/badge.svg?token=Y3llbuAYLr)](https://codecov.io/github/reportportal/service-auto-analyzer)
[![Join Slack chat!](https://slack.epmrpp.reportportal.io/badge.svg)](https://slack.epmrpp.reportportal.io/)
[![Join Slack chat!](https://img.shields.io/badge/slack-join-brightgreen.svg)](https://slack.epmrpp.reportportal.io/)
[![stackoverflow](https://img.shields.io/badge/reportportal-stackoverflow-orange.svg?style=flat)](http://stackoverflow.com/questions/tagged/reportportal)
[![Build with Love](https://img.shields.io/badge/build%20with-❤%EF%B8%8F%E2%80%8D-lightgrey.svg)](http://reportportal.io?style=flat)

Expand Down Expand Up @@ -58,9 +58,9 @@
| PATTERN_LABEL_MIN_PERCENT | float | 0.9 | the value of minimum percent of the same issue type for pattern to be suggested as a pattern with a label |
| PATTERN_LABEL_MIN_COUNT | integer | 5 | the value of minimum count of pattern occurrence to be suggested as a pattern with a label |
| PATTERN_MIN_COUNT | integer | 10 | the value of minimum count of pattern occurrence to be suggested as a pattern without a label |
| MAX_LOGS_FOR_DEFECT_TYPE_MODEL | integer | 10000 | the value of maximum count of logs per defect type to add into defect type model training. Default value is chosen in cosideration of having space for analyzer_train docker image setuo of 1GB, if you can give more GB you can linearly allow more logs to be considered. |
| PROB_CUSTOM_MODEL_SUGGESTIONS | float | 0.7 | the probability of custom retrained model to be used for running when suggestions are requested. The maximum value is 0.8, because we want at least 20% of requests to process with a global model not to overfit for project too much. The bigger the value of this env varibale the more often custom retrained model will be used. |
| PROB_CUSTOM_MODEL_AUTO_ANALYSIS | float | 0.5 | the probability of custom retrained model to be used for running when auto-analysis is performed. The maximum value is 1.0. The bigger the value of this env varibale the more often custom retrained model will be used. |
| MAX_LOGS_FOR_DEFECT_TYPE_MODEL | integer | 10000 | the value of maximum count of logs per defect type to add into defect type model training. Default value is chosen in consideration of having space for analyzer_train docker image setup of 1GB, if you can give more GB you can linearly allow more logs to be considered. |
| PROB_CUSTOM_MODEL_SUGGESTIONS | float | 0.7 | the probability of custom retrained model to be used for running when suggestions are requested. The maximum value is 0.8, because we want at least 20% of requests to process with a global model not to overfit for project too much. The bigger the value of this env variable the more often custom retrained model will be used. |
| PROB_CUSTOM_MODEL_AUTO_ANALYSIS | float | 0.5 | the probability of custom retrained model to be used for running when auto-analysis is performed. The maximum value is 1.0. The bigger the value of this env variable the more often custom retrained model will be used. |
| MAX_SUGGESTIONS_NUMBER | integer | 3 | the maximum number of suggestions shown in the ML suggestions area in the defect type editor. |

## Instructions for analyzer setup without Docker
Expand Down
52 changes: 26 additions & 26 deletions app/amqp/amqp.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,45 +12,48 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import os

import pika

from app.commons import logging
from app.utils import text_processing

logger = logging.getLogger("analyzerApp.amqp")


class AmqpClient:
"""AmqpClient handles communication with rabbitmq"""
def __init__(self, amqpUrl):
self.connection = AmqpClient.create_ampq_connection(amqpUrl)

connection: pika.BlockingConnection

def __init__(self, amqp_url):
self.connection = AmqpClient.create_ampq_connection(amqp_url)

@staticmethod
def create_ampq_connection(amqpUrl):
def create_ampq_connection(amqp_url):
"""Creates AMQP client"""
amqp_full_url = amqpUrl.rstrip("\\").rstrip("/") + "?heartbeat=600"
amqp_full_url = amqp_url.rstrip("\\").rstrip("/") + "?heartbeat=600"
logger.info("Try connect to %s" % text_processing.remove_credentials_from_url(amqp_full_url))
return pika.BlockingConnection(pika.connection.URLParameters(amqp_full_url))

@staticmethod
def bind_queue(channel, name, exchange_name):
"""AmqpClient binds a queue with an exchange for rabbitmq"""
try:
result = channel.queue_declare(queue=name, durable=False,
exclusive=False, auto_delete=True,
result = channel.queue_declare(queue=name, durable=False, exclusive=False, auto_delete=True,
arguments=None)
except Exception as err:
logger.error("Failed to open a channel pid(%d)", os.getpid())
logger.error(err)
except Exception as exc:
logger.error(f'Failed to declare a queue "{name}" pid({os.getpid()})')
logger.exception(exc)
os.kill(os.getpid(), 9)
return False
logger.info("Queue '%s' has been declared pid(%d)", result.method.queue, os.getpid())
try:
channel.queue_bind(exchange=exchange_name, queue=result.method.queue, routing_key=name)
except Exception as err:
logger.error("Failed to open a channel pid(%d)", os.getpid())
logger.error(err)
except Exception as exc:
logger.error(f'Failed to bind a queue "{name}" pid({os.getpid()})')
logger.exception(exc)
os.kill(os.getpid(), 9)
return True

Expand All @@ -59,16 +62,16 @@ def consume_queue(channel, queue, auto_ack, exclusive, msg_callback):
"""AmqpClient shows how to handle a message from the queue"""
try:
channel.basic_qos(prefetch_count=1, prefetch_size=0)
except Exception as err:
except Exception as exc:
logger.error("Failed to configure Qos pid(%d)", os.getpid())
logger.error(err)
logger.exception(exc)
os.kill(os.getpid(), 9)
try:
channel.basic_consume(queue=queue, auto_ack=auto_ack, exclusive=exclusive,
on_message_callback=msg_callback)
except Exception as err:
except Exception as exc:
logger.error("Failed to register a consumer pid(%d)", os.getpid())
logger.error(err)
logger.exception(exc)
os.kill(os.getpid(), 9)

def receive(self, exchange_name, queue, auto_ack, exclusive, msg_callback):
Expand All @@ -79,18 +82,15 @@ def receive(self, exchange_name, queue, auto_ack, exclusive, msg_callback):
AmqpClient.consume_queue(channel, queue, auto_ack, exclusive, msg_callback)
logger.info("started consuming pid(%d) on the queue %s", os.getpid(), queue)
channel.start_consuming()
except Exception as err:
except Exception as exc:
logger.error("Failed to consume messages pid(%d) in queue %s", os.getpid(), queue)
logger.error(err)
logger.exception(exc)
os.kill(os.getpid(), 9)

def send_to_inner_queue(self, exchange_name, queue, data):
def send_to_inner_queue(self, exchange_name: str, queue: str, data: str) -> None:
try:
channel = self.connection.channel()
channel.basic_publish(
exchange=exchange_name,
routing_key=queue,
body=data)
except Exception as err:
channel.basic_publish(exchange=exchange_name, routing_key=queue, body=bytes(data, 'utf-8'))
except Exception as exc:
logger.error("Failed to publish messages in queue %s", queue)
logger.error(err)
logger.exception(exc)
Loading

0 comments on commit 4d8830a

Please sign in to comment.