Merge pull request #191 from reportportal/rc/5.13.0
5.13.0 Release
pbortnik authored Dec 11, 2024
2 parents 97dabde + ed40d99 commit 04fb1ff
Showing 84 changed files with 1,555 additions and 1,050 deletions.
2 changes: 1 addition & 1 deletion .bumpversion.cfg
@@ -1,5 +1,5 @@
[bumpversion]
current_version = 5.11.0
current_version = 5.13.0
commit = True
tag = True
parse = (?P<major>\d+)(\.(?P<minor>\d+))?(\.(?P<patch>\d+))?(\-(?P<release>[a-zA-Z]+))?(\-(?P<releasenum>[a-zA-Z]+))?(\-(?P<snapshot>[a-zA-Z]+))?(\-(?P<build>\d+))?
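As a quick illustration, the `parse` pattern above can be exercised in Python; the `PARSE` constant and the sample version string below are assumptions for this sketch, not part of the config:

```python
# Illustrative check of the bumpversion `parse` pattern from the config above.
import re

PARSE = re.compile(
    r"(?P<major>\d+)(\.(?P<minor>\d+))?(\.(?P<patch>\d+))?"
    r"(\-(?P<release>[a-zA-Z]+))?(\-(?P<releasenum>[a-zA-Z]+))?"
    r"(\-(?P<snapshot>[a-zA-Z]+))?(\-(?P<build>\d+))?"
)

match = PARSE.match("5.13.0")
assert match is not None
assert match.group("major", "minor", "patch") == ("5", "13", "0")
```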
1 change: 1 addition & 0 deletions .github/workflows/build-dev-image.yml
@@ -32,4 +32,5 @@ jobs:
image-tag: 'develop-${{ github.run_number }}'
version: 'develop-${{ github.run_number }}'
date: ${{ needs.variables-setup.outputs.date }}
runs-on: ubuntu-latest
secrets: inherit
25 changes: 1 addition & 24 deletions README.md
@@ -33,7 +33,7 @@
| MINIO_ACCESS_KEY | string | minio | you need to set a minio access key here |
| MINIO_SECRET_KEY | string | minio123 | you need to set a minio secret key here |
| MINIO_USE_TLS | boolean | false | Flag indicating whether to use a secure (TLS) connection to the S3 service. |
| ANALYZER_BINSTORE_BUCKETPREFIX | string | prj- | the prefix for buckets which are added to each project filepath. |
| ANALYZER_BINSTORE_BUCKETPREFIX | string | prj- | The prefix for bucket names; the project ID is appended to it, e.g. `prj-2`. It can also contain a root path, e.g. `my-bucket/reportportal/prj-`: the service then operates in single-bucket mode and puts everything into the `my-bucket` bucket under the `reportportal/prj-2` path (for the default project of the default user). See the sketch after this table. |
| ANALYZER_BINSTORE_MINIO_REGION | string | | the AWS S3 region to use when saving objects |
| INSTANCE_TASK_TYPE | string | | leave blank to run a standard analyzer instance; set to "train" to run a training instance. |
| FILESYSTEM_DEFAULT_PATH | string | storage | the path where all analyzer data is stored when `ANALYZER_BINARYSTORE_TYPE` is set to `filesystem`. To mount this folder to a folder on your machine, add the following to the docker compose service:<br/><code>volumes:<br/>&nbsp;&nbsp;- ./data/analyzer:/backend/storage</code> |
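To make the two bucket-prefix modes concrete, here is a minimal Python sketch of the resolution logic described in the `ANALYZER_BINSTORE_BUCKETPREFIX` row above. The function name and the split-on-first-slash rule are assumptions for illustration, not the analyzer's actual implementation.

```python
# Hypothetical sketch: how a bucket prefix plus a project ID could resolve
# to a (bucket, object path) pair, per the table description above.
def resolve_location(bucket_prefix: str, project_id: int) -> tuple[str, str]:
    if "/" in bucket_prefix:
        # Single-bucket mode: everything before the first slash is the bucket,
        # the rest is a path prefix inside it.
        bucket, _, path_prefix = bucket_prefix.partition("/")
        return bucket, f"{path_prefix}{project_id}"
    # Bucket-per-project mode: the prefix plus the project ID names the bucket.
    return f"{bucket_prefix}{project_id}", ""


assert resolve_location("prj-", 2) == ("prj-2", "")
assert resolve_location("my-bucket/reportportal/prj-", 2) == ("my-bucket", "reportportal/prj-2")
```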
@@ -116,26 +116,3 @@ Perform next steps inside source directory of the analyzer.
```bash
/analyzer-train/bin/uwsgi --ini res/analyzer-train.ini
```

### For Windows:
1. Create a virtual environment with any name (in the example **env**)
```
python -m venv env
```
2. Activate the virtual environment
```
call env\Scripts\activate.bat
```
3. Install the Python libraries
```
python -m pip install -r requirements_windows.txt
```
4. Install the stopwords package from the nltk library
```
python -m nltk.downloader stopwords
```
5. Start the program.
```
python app/app.py
```

2 changes: 1 addition & 1 deletion VERSION
@@ -1 +1 @@
5.11.0
5.13.0
6 changes: 3 additions & 3 deletions app/commons/clusterizer.py
Expand Up @@ -23,7 +23,7 @@
from app.commons import logging
from app.utils import utils, text_processing

logger = logging.getLogger("analyzerApp.clusterizer")
LOGGER = logging.getLogger("analyzerApp.clusterizer")


class Clusterizer:
@@ -65,7 +65,7 @@ def find_groups_by_similarity(
rearranged_groups[cluster].append(real_id)
new_group_id += 1
group_id = new_group_id
logger.debug("Time for finding groups: %.2f s", time() - start_time)
LOGGER.debug("Time for finding groups: %.2f s", time() - start_time)
return rearranged_groups

def similarity_groupping(
@@ -125,7 +125,7 @@ def unite_groups_by_hashes(self, messages: list[str], threshold: float = 0.95) -
if cluster not in rearranged_groups:
rearranged_groups[cluster] = []
rearranged_groups[cluster].append(key)
logger.debug("Time for finding hash groups: %.2f s", time() - start_time)
LOGGER.debug("Time for finding hash groups: %.2f s", time() - start_time)
return rearranged_groups

def perform_light_deduplication(self, messages: list[str]) -> tuple[list[str], dict[int, list[int]]]:
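Judging purely from the `perform_light_deduplication` signature shown above, the method returns the deduplicated messages plus a mapping from each kept group to the indices of the original messages. The following is a hypothetical sketch consistent with that signature; the normalization step is an assumption, not the analyzer's actual algorithm.

```python
# Hypothetical sketch inferred from the signature above; the key-folding
# step is a stand-in, NOT the analyzer's real deduplication logic.
def perform_light_deduplication(messages: list[str]) -> tuple[list[str], dict[int, list[int]]]:
    unique_messages: list[str] = []
    groups: dict[int, list[int]] = {}
    seen: dict[str, int] = {}
    for idx, message in enumerate(messages):
        key = " ".join(message.split()).lower()  # cheap whitespace/case folding
        if key not in seen:
            seen[key] = len(unique_messages)
            unique_messages.append(message)
            groups[seen[key]] = []
        groups[seen[key]].append(idx)
    return unique_messages, groups


unique, groups = perform_light_deduplication(["Error  A", "error a", "Error B"])
assert unique == ["Error  A", "Error B"] and groups == {0: [0, 1], 1: [2]}
```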
12 changes: 3 additions & 9 deletions app/commons/esclient.py
@@ -25,11 +25,9 @@
from urllib3.exceptions import InsecureRequestWarning

from app.amqp.amqp import AmqpClient
from app.commons import logging
from app.commons import logging, request_factory, log_merger
from app.commons.model.launch_objects import ApplicationConfig, Response, Launch, TestItem, BulkResponse
from app.commons.model.ml import TrainInfo, ModelType
from app.commons.log_merger import LogMerger
from app.commons.log_requests import LogRequests
from app.utils import utils, text_processing

logger = logging.getLogger("analyzerApp.esclient")
@@ -40,16 +38,12 @@ class EsClient:
app_config: ApplicationConfig
es_client: elasticsearch.Elasticsearch
host: str
log_requests: LogRequests
log_merger: LogMerger
tables_to_recreate: list[str]

def __init__(self, app_config: ApplicationConfig, es_client: elasticsearch.Elasticsearch = None):
self.app_config = app_config
self.host = app_config.esHost
self.es_client = es_client or self.create_es_client(app_config)
self.log_requests = LogRequests()
self.log_merger = LogMerger()
self.tables_to_recreate = ["rp_aa_stats", "rp_model_train_stats", "rp_suggestions_info_metrics"]

def create_es_client(self, app_config: ApplicationConfig) -> elasticsearch.Elasticsearch:
@@ -220,7 +214,7 @@ def _to_index_bodies(
if log.logLevel < utils.ERROR_LOGGING_LEVEL or not log.message.strip():
continue

bodies.append(LogRequests._prepare_log(launch, test_item, log, project_with_prefix))
bodies.append(request_factory.prepare_log(launch, test_item, log, project_with_prefix))
logs_added = True
if logs_added:
test_item_ids.append(str(test_item.testItemId))
@@ -276,7 +270,7 @@ def _merge_logs(self, test_item_ids, project):
test_items_dict[test_item_id] = []
test_items_dict[test_item_id].append(r)
for test_item_id in test_items_dict:
merged_logs, _ = self.log_merger.decompose_logs_merged_and_without_duplicates(
merged_logs, _ = log_merger.decompose_logs_merged_and_without_duplicates(
test_items_dict[test_item_id])
for log in merged_logs:
if log["_source"]["is_merged"]:
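The recurring change in this file replaces single-method helper classes (`LogRequests`, `LogMerger`) that were instantiated in `__init__` and stored on `self` with plain module-level functions (`request_factory.prepare_log`, `log_merger.decompose_logs_merged_and_without_duplicates`). A minimal sketch of that pattern; the function body is a hypothetical stand-in, and only the call shape mirrors the diff.

```python
# Stand-in for a module like app/commons/request_factory.py: a stateless
# helper exposed as a plain function instead of a single-method class.
def prepare_log(message: str) -> dict:
    return {"_source": {"message": message, "is_merged": False}}


class EsClientSketch:
    # No helper objects are constructed in __init__ and stored on self;
    # the stateless function is imported once and called directly.
    def to_index_bodies(self, messages: list[str]) -> list[dict]:
        return [prepare_log(m) for m in messages]


bodies = EsClientSketch().to_index_bodies(["error one", "error two"])
assert bodies[0]["_source"]["message"] == "error one"
```

Dropping the per-instance helpers removes unused state and shortens the import list, at the cost of making the helpers slightly harder to swap out in tests.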
