diff --git a/.github/workflows/sync-jira-versions.yml b/.github/workflows/sync-jira-versions.yml new file mode 100644 index 0000000..f31c5d4 --- /dev/null +++ b/.github/workflows/sync-jira-versions.yml @@ -0,0 +1,13 @@ +name: Add GitHub release version to Jira issues + +on: + pull_request: + types: [opened, synchronize, reopened] + +jobs: + call-jira-sync: + name: Call Jira versions update + uses: reportportal/.github/.github/workflows/update-jira-versions.yaml@main + with: + jira-server: ${{ vars.JIRA_SERVER }} + secrets: inherit \ No newline at end of file diff --git a/build.gradle b/build.gradle index 94159bd..f6dbbb6 100644 --- a/build.gradle +++ b/build.gradle @@ -54,7 +54,7 @@ ext['log4j-to-slf4j.version'] = '2.21.1' //https://nvd.nist.gov/vuln/detail/CVE-2022-26520 ext['postgresql.version'] = '42.6.0' ext['snakeyaml.version'] = '2.2' -// + dependencies { @@ -66,12 +66,31 @@ dependencies { // https://mvnrepository.com/artifact/org.apache.commons/commons-lang3 implementation 'org.apache.commons:commons-lang3:3.12.0' + // Fix CVE-2023-46589, CVE-2024-24549 + implementation 'org.apache.tomcat.embed:tomcat-embed-core:9.0.86' + implementation 'org.apache.tomcat.embed:tomcat-embed-el:9.0.86' + implementation 'org.apache.tomcat.embed:tomcat-embed-websocket:9.0.86' + + //Fix CVE-2023-6378, CVE-2023-6481 + implementation 'ch.qos.logback:logback-classic:1.2.13' + implementation 'ch.qos.logback:logback-core:1.2.13' + + //Fix CVE-2023-40827, CVE-2023-40828, CVE-2023-40826 + implementation 'org.springframework:spring-webmvc:5.3.33' + implementation 'org.springframework:spring-web:5.3.33' + + // Fix CVE-2024-25710, CVE-2024-26308 + implementation 'org.apache.commons:commons-compress:1.26.0' + + //Fix CVE-2023-34050 + implementation 'org.springframework.amqp:spring-amqp:2.4.17' implementation 'org.springframework.boot:spring-boot-starter-aop' implementation 'org.springframework.boot:spring-boot-starter-actuator' implementation 
'org.springframework.boot:spring-boot-starter-jdbc' implementation 'org.springframework.boot:spring-boot-starter-web' implementation 'org.springframework.boot:spring-boot-starter-amqp' + implementation 'org.springframework:spring-jdbc:6.1.5' implementation 'org.apache.jclouds.api:s3:2.5.0' implementation 'org.apache.jclouds.provider:aws-s3:2.5.0' implementation 'org.apache.jclouds.api:filesystem:2.5.0' diff --git a/gradle.properties b/gradle.properties index fbe8358..dd3706e 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,4 +1,4 @@ -version=5.11.1 +version=5.11.2 description=EPAM Report portal. Service jobs dockerServerUrl=unix:///var/run/docker.sock dockerPrepareEnvironment= diff --git a/src/main/java/com/epam/reportportal/config/JacksonConfiguration.java b/src/main/java/com/epam/reportportal/config/JacksonConfiguration.java new file mode 100644 index 0000000..8cd7255 --- /dev/null +++ b/src/main/java/com/epam/reportportal/config/JacksonConfiguration.java @@ -0,0 +1,47 @@ +/* + * Copyright 2024 EPAM Systems + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.epam.reportportal.config; + +import com.fasterxml.jackson.databind.DeserializationFeature; +import com.fasterxml.jackson.databind.MapperFeature; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializationFeature; +import com.fasterxml.jackson.databind.introspect.JacksonAnnotationIntrospector; +import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +/** + * @author Siarhei Hrabko + */ +@Configuration +public class JacksonConfiguration { + + /** + * @return Configured object mapper + */ + @Bean(name = "objectMapper") + public ObjectMapper objectMapper() { + ObjectMapper om = new ObjectMapper(); + om.setAnnotationIntrospector(new JacksonAnnotationIntrospector()); + om.configure(MapperFeature.DEFAULT_VIEW_INCLUSION, true); + om.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); + om.configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false); + om.registerModule(new JavaTimeModule()); + return om; + } +} diff --git a/src/main/java/com/epam/reportportal/jobs/clean/CleanAttachmentJob.java b/src/main/java/com/epam/reportportal/jobs/clean/CleanAttachmentJob.java index afc14a5..a2f6fc7 100644 --- a/src/main/java/com/epam/reportportal/jobs/clean/CleanAttachmentJob.java +++ b/src/main/java/com/epam/reportportal/jobs/clean/CleanAttachmentJob.java @@ -17,30 +17,41 @@ @Service public class CleanAttachmentJob extends BaseCleanJob { - private static final String MOVING_QUERY = - """ - WITH moved_rows AS (DELETE FROM attachment WHERE project_id = ? 
AND creation_date <= ?::TIMESTAMP RETURNING *) \s - INSERT INTO attachment_deletion (id, file_id, thumbnail_id, creation_attachment_date, deletion_date)\s - SELECT id, file_id, thumbnail_id, creation_date, NOW() FROM moved_rows;"""; + private static final String MOVING_QUERY = """ + WITH moved_rows AS ( + DELETE FROM attachment\s + WHERE project_id = ?\s + AND creation_date <= ?::TIMESTAMP\s + AND launch_id IN ( + SELECT id FROM launch WHERE retention_policy='REGULAR' + )\s + RETURNING * + ) + INSERT INTO attachment_deletion (id, file_id, thumbnail_id, creation_attachment_date, + deletion_date) + SELECT id, file_id, thumbnail_id, creation_date, NOW() FROM moved_rows;"""; public CleanAttachmentJob(JdbcTemplate jdbcTemplate) { super(jdbcTemplate); } @Override - @Scheduled(cron = "${rp.environment.variable.clean.attachment.cron}") - @SchedulerLock(name = "cleanAttachment", lockAtMostFor = "24h") - public void execute() { - moveAttachments(); - } + @Scheduled(cron = "${rp.environment.variable.clean.attachment.cron}") + @SchedulerLock(name = "cleanAttachment", lockAtMostFor = "24h") + public void execute() { + moveAttachments(); + } - void moveAttachments() { - AtomicInteger counter = new AtomicInteger(0); - getProjectsWithAttribute(KEEP_SCREENSHOTS).forEach((projectId, duration) -> { - LocalDateTime lessThanDate = LocalDateTime.now(ZoneOffset.UTC).minus(duration); - int movedCount = jdbcTemplate.update(MOVING_QUERY, projectId, lessThanDate); - counter.addAndGet(movedCount); - LOGGER.info("Moved {} attachments to the deletion table for project {}, lessThanDate {} ", movedCount, projectId, lessThanDate); - }); - } + void moveAttachments() { + AtomicInteger counter = new AtomicInteger(0); + getProjectsWithAttribute(KEEP_SCREENSHOTS).forEach((projectId, duration) -> { + LocalDateTime lessThanDate = LocalDateTime.now(ZoneOffset.UTC).minus(duration); + int movedCount = jdbcTemplate.update(MOVING_QUERY, projectId, lessThanDate); + counter.addAndGet(movedCount); + LOGGER.info( 
+ "Moved {} attachments to the deletion table for project {}, lessThanDate {} ", movedCount, + projectId, lessThanDate + ); + }); + } } diff --git a/src/main/java/com/epam/reportportal/jobs/clean/CleanLaunchJob.java b/src/main/java/com/epam/reportportal/jobs/clean/CleanLaunchJob.java index a43ce5a..d519999 100644 --- a/src/main/java/com/epam/reportportal/jobs/clean/CleanLaunchJob.java +++ b/src/main/java/com/epam/reportportal/jobs/clean/CleanLaunchJob.java @@ -27,8 +27,11 @@ public class CleanLaunchJob extends BaseCleanJob { private static final String IDS_PARAM = "ids"; private static final String PROJECT_ID_PARAM = "projectId"; private static final String START_TIME_PARAM = "startTime"; - private static final String SELECT_LAUNCH_ID_QUERY = "SELECT id FROM launch WHERE project_id = :projectId AND start_time <= :startTime::TIMESTAMP;"; - private static final String DELETE_CLUSTER_QUERY = "DELETE FROM clusters WHERE clusters.launch_id IN (:ids);"; + private static final String SELECT_LAUNCH_ID_QUERY = + "SELECT id FROM launch WHERE project_id = :projectId AND start_time <= " + + ":startTime::TIMESTAMP AND retention_policy = 'REGULAR'"; + private static final String DELETE_CLUSTER_QUERY = + "DELETE FROM clusters WHERE clusters.launch_id IN (:ids);"; private static final String DELETE_LAUNCH_QUERY = "DELETE FROM launch WHERE id IN (:ids);"; private final Integer batchSize; private final NamedParameterJdbcTemplate namedParameterJdbcTemplate; @@ -39,9 +42,8 @@ public class CleanLaunchJob extends BaseCleanJob { public CleanLaunchJob( @Value("${rp.environment.variable.elements-counter.batch-size}") Integer batchSize, - JdbcTemplate jdbcTemplate, - NamedParameterJdbcTemplate namedParameterJdbcTemplate, CleanLogJob cleanLogJob, - IndexerServiceClient indexerServiceClient, + JdbcTemplate jdbcTemplate, NamedParameterJdbcTemplate namedParameterJdbcTemplate, + CleanLogJob cleanLogJob, IndexerServiceClient indexerServiceClient, ApplicationEventPublisher eventPublisher, 
SearchEngineClient searchEngineClient) { super(jdbcTemplate); this.batchSize = batchSize; @@ -53,23 +55,23 @@ public CleanLaunchJob( } @Override - @Scheduled(cron = "${rp.environment.variable.clean.launch.cron}") - @SchedulerLock(name = "cleanLaunch", lockAtMostFor = "24h") - public void execute() { - removeLaunches(); - cleanLogJob.removeLogs(); - } + @Scheduled(cron = "${rp.environment.variable.clean.launch.cron}") + @SchedulerLock(name = "cleanLaunch", lockAtMostFor = "24h") + public void execute() { + removeLaunches(); + cleanLogJob.removeLogs(); + } - private void removeLaunches() { - AtomicInteger counter = new AtomicInteger(0); - getProjectsWithAttribute(KEEP_LAUNCHES).forEach((projectId, duration) -> { - final LocalDateTime lessThanDate = LocalDateTime.now(ZoneOffset.UTC).minus(duration); - final List launchIds = getLaunchIds(projectId, lessThanDate); - if (!launchIds.isEmpty()) { - deleteClusters(launchIds); -// final Long numberOfLaunchElements = countNumberOfLaunchElements(launchIds); - int deleted = namedParameterJdbcTemplate.update(DELETE_LAUNCH_QUERY, - Map.of(IDS_PARAM, launchIds)); + private void removeLaunches() { + AtomicInteger counter = new AtomicInteger(0); + getProjectsWithAttribute(KEEP_LAUNCHES).forEach((projectId, duration) -> { + final LocalDateTime lessThanDate = LocalDateTime.now(ZoneOffset.UTC).minus(duration); + final List launchIds = getLaunchIds(projectId, lessThanDate); + if (!launchIds.isEmpty()) { + deleteClusters(launchIds); + // final Long numberOfLaunchElements = countNumberOfLaunchElements(launchIds); + int deleted = + namedParameterJdbcTemplate.update(DELETE_LAUNCH_QUERY, Map.of(IDS_PARAM, launchIds)); counter.addAndGet(deleted); LOGGER.info("Delete {} launches for project {}", deleted, projectId); // to avoid error message in analyzer log, doesn't find index @@ -79,12 +81,14 @@ private void removeLaunches() { deleteLogsFromSearchEngineByLaunchIdsAndProjectId(launchIds, projectId); -// eventPublisher.publishEvent(new 
ElementsDeletedEvent(launchIds, projectId, numberOfLaunchElements)); -// LOGGER.info("Send event with elements deleted number {} for project {}", deleted, projectId); - } - } - }); - } + // eventPublisher.publishEvent(new ElementsDeletedEvent(launchIds, + // projectId, numberOfLaunchElements)); + // LOGGER.info("Send event with elements deleted number {} for + // project {}", deleted, projectId); + } + } + }); + } private void deleteLogsFromSearchEngineByLaunchIdsAndProjectId(List launchIds, Long projectId) { @@ -96,8 +100,7 @@ private void deleteLogsFromSearchEngineByLaunchIdsAndProjectId(List launch private List getLaunchIds(Long projectId, LocalDateTime lessThanDate) { return namedParameterJdbcTemplate.queryForList(SELECT_LAUNCH_ID_QUERY, - Map.of(PROJECT_ID_PARAM, projectId, START_TIME_PARAM, lessThanDate), - Long.class + Map.of(PROJECT_ID_PARAM, projectId, START_TIME_PARAM, lessThanDate), Long.class ); } @@ -111,20 +114,16 @@ private Long countNumberOfLaunchElements(List launchIds) { "SELECT item_id FROM test_item WHERE launch_id IN (:ids) UNION " + "SELECT item_id FROM test_item WHERE retry_of IS NOT NULL AND retry_of IN " + "(SELECT item_id FROM test_item WHERE launch_id IN (:ids))", - Map.of(IDS_PARAM, launchIds), - Long.class + Map.of(IDS_PARAM, launchIds), Long.class ); resultedNumber.addAndGet(itemIds.size()); - Lists.partition(itemIds, batchSize) - .forEach(batch -> resultedNumber.addAndGet( - Optional.ofNullable(namedParameterJdbcTemplate.queryForObject( - "SELECT COUNT(*) FROM log WHERE item_id IN (:ids);", - Map.of(IDS_PARAM, batch), - Long.class - )).orElse(0L))); + Lists.partition(itemIds, batchSize).forEach(batch -> resultedNumber.addAndGet( + Optional.ofNullable(namedParameterJdbcTemplate.queryForObject( + "SELECT COUNT(*) FROM log WHERE item_id IN (:ids);", Map.of(IDS_PARAM, batch), + Long.class + )).orElse(0L))); resultedNumber.addAndGet(Optional.ofNullable(namedParameterJdbcTemplate.queryForObject( - "SELECT COUNT(*) FROM log WHERE 
log.launch_id IN (:ids);", - Map.of(IDS_PARAM, launchIds), + "SELECT COUNT(*) FROM log WHERE log.launch_id IN (:ids);", Map.of(IDS_PARAM, launchIds), Long.class )).orElse(0L)); return resultedNumber.longValue(); diff --git a/src/main/java/com/epam/reportportal/jobs/clean/CleanLogJob.java b/src/main/java/com/epam/reportportal/jobs/clean/CleanLogJob.java index 20c25d6..55fa6c1 100644 --- a/src/main/java/com/epam/reportportal/jobs/clean/CleanLogJob.java +++ b/src/main/java/com/epam/reportportal/jobs/clean/CleanLogJob.java @@ -23,8 +23,19 @@ public class CleanLogJob extends BaseCleanJob { private static final String PROJECT_ID_PARAM = "projectId"; private static final String START_TIME_PARAM = "startTime"; - private static final String DELETE_LOGS_QUERY = "DELETE FROM log WHERE project_id = ? AND log_time <= ?::TIMESTAMP;"; - private static final String SELECT_LAUNCH_ID_QUERY = "SELECT id FROM launch WHERE project_id = :projectId AND start_time <= :startTime::TIMESTAMP;"; + private static final String DELETE_LOGS_QUERY = """ + DELETE FROM log + WHERE log.project_id = ? 
AND log.log_time <= ?::TIMESTAMP + AND COALESCE(log.launch_id, + (SELECT test_item.launch_id FROM test_item WHERE test_item.item_id = log.item_id), + (SELECT test_item.launch_id FROM test_item WHERE test_item.item_id = + (SELECT ti.retry_of FROM test_item ti WHERE ti.item_id = log.item_id) + ) + ) IN (SELECT launch.id FROM launch WHERE launch.retention_policy = 'REGULAR'); + """; + private static final String SELECT_LAUNCH_ID_QUERY = + "SELECT id FROM launch WHERE project_id = :projectId AND start_time <= " + + ":startTime::TIMESTAMP;"; private final CleanAttachmentJob cleanAttachmentJob; private final IndexerServiceClient indexerServiceClient; @@ -45,34 +56,34 @@ public CleanLogJob(JdbcTemplate jdbcTemplate, CleanAttachmentJob cleanAttachment } @Override - @Scheduled(cron = "${rp.environment.variable.clean.log.cron}") - @SchedulerLock(name = "cleanLog", lockAtMostFor = "24h") - public void execute() { - removeLogs(); - cleanAttachmentJob.moveAttachments(); - } + @Scheduled(cron = "${rp.environment.variable.clean.log.cron}") + @SchedulerLock(name = "cleanLog", lockAtMostFor = "24h") + public void execute() { + removeLogs(); + cleanAttachmentJob.moveAttachments(); + } - void removeLogs() { - AtomicInteger counter = new AtomicInteger(0); - // TODO: Need to refactor Logs to keep real it's launchId and combine code with - // CleanLaunch to avoid duplication - getProjectsWithAttribute(KEEP_LOGS).forEach((projectId, duration) -> { - final LocalDateTime lessThanDate = LocalDateTime.now(ZoneOffset.UTC).minus(duration); - int deleted = jdbcTemplate.update(DELETE_LOGS_QUERY, projectId, lessThanDate); - counter.addAndGet(deleted); - LOGGER.info("Delete {} logs for project {}", deleted, projectId); - // to avoid error message in analyzer log, doesn't find index - if (deleted > 0) { - indexerServiceClient.removeFromIndexLessThanLogDate(projectId, lessThanDate); - LOGGER.info("Send message for deletion to analyzer for project {}", projectId); + void removeLogs() { + 
AtomicInteger counter = new AtomicInteger(0); + // TODO: Need to refactor Logs to keep real it's launchId and combine code with + // CleanLaunch to avoid duplication + getProjectsWithAttribute(KEEP_LOGS).forEach((projectId, duration) -> { + final LocalDateTime lessThanDate = LocalDateTime.now(ZoneOffset.UTC).minus(duration); + int deleted = jdbcTemplate.update(DELETE_LOGS_QUERY, projectId, lessThanDate); + counter.addAndGet(deleted); + LOGGER.info("Delete {} logs for project {}", deleted, projectId); + // to avoid error message in analyzer log, doesn't find index + if (deleted > 0) { + indexerServiceClient.removeFromIndexLessThanLogDate(projectId, lessThanDate); + LOGGER.info("Send message for deletion to analyzer for project {}", projectId); final List launchIds = getLaunchIds(projectId, lessThanDate); if (!launchIds.isEmpty()) { deleteLogsFromSearchEngineByLaunchIdsAndProjectId(launchIds, projectId); } - } - }); - } + } + }); + } private void deleteLogsFromSearchEngineByLaunchIdsAndProjectId(List launchIds, Long projectId) { @@ -84,8 +95,7 @@ private void deleteLogsFromSearchEngineByLaunchIdsAndProjectId(List launch private List getLaunchIds(Long projectId, LocalDateTime lessThanDate) { return namedParameterJdbcTemplate.queryForList(SELECT_LAUNCH_ID_QUERY, - Map.of(PROJECT_ID_PARAM, projectId, START_TIME_PARAM, lessThanDate), - Long.class + Map.of(PROJECT_ID_PARAM, projectId, START_TIME_PARAM, lessThanDate), Long.class ); } } diff --git a/src/main/java/com/epam/reportportal/jobs/statistics/DefectUpdateStatisticsJob.java b/src/main/java/com/epam/reportportal/jobs/statistics/DefectUpdateStatisticsJob.java new file mode 100644 index 0000000..914fa80 --- /dev/null +++ b/src/main/java/com/epam/reportportal/jobs/statistics/DefectUpdateStatisticsJob.java @@ -0,0 +1,192 @@ +/* + * Copyright 2024 EPAM Systems + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.epam.reportportal.jobs.statistics; + +import static org.springframework.http.HttpMethod.POST; + +import com.epam.reportportal.jobs.BaseJob; +import java.security.SecureRandom; +import java.time.Instant; +import java.time.ZoneOffset; +import java.time.temporal.ChronoUnit; +import java.util.HashSet; +import java.util.Set; +import net.javacrumbs.shedlock.spring.annotation.SchedulerLock; +import org.apache.commons.lang3.StringUtils; +import org.json.JSONArray; +import org.json.JSONObject; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.http.HttpEntity; +import org.springframework.http.HttpHeaders; +import org.springframework.http.MediaType; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.core.namedparam.MapSqlParameterSource; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; +import org.springframework.scheduling.annotation.Scheduled; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; +import org.springframework.web.client.RestTemplate; + +/** + * Sends statistics about amounts of manual analyzed items to the GA4 service. 
+ * + * @author Maksim Antonov + */ +@Service +public class DefectUpdateStatisticsJob extends BaseJob { + + private static final String GA_URL = "https://www.google-analytics.com/mp/collect?measurement_id=%s&api_secret=%s"; + private static final String DATE_BEFORE = "date_before"; + + private static final String SELECT_INSTANCE_ID_QUERY = "SELECT value FROM server_settings WHERE key = 'server.details.instance';"; + private static final String SELECT_STATISTICS_QUERY = "SELECT * FROM analytics_data WHERE type = 'DEFECT_UPDATE_STATISTICS' AND created_at >= :date_before::TIMESTAMP;"; + private static final String DELETE_STATISTICS_QUERY = "DELETE FROM analytics_data WHERE type = 'DEFECT_UPDATE_STATISTICS';"; + + private final NamedParameterJdbcTemplate namedParameterJdbcTemplate; + + private final RestTemplate restTemplate; + + private final String mId; + private final String gaId; + + + /** + * Initializes {@link DefectUpdateStatisticsJob}. + * + * @param jdbcTemplate {@link JdbcTemplate} + */ + @Autowired + public DefectUpdateStatisticsJob(JdbcTemplate jdbcTemplate, + @Value("${rp.environment.variable.ga.mId}") String mId, + @Value("${rp.environment.variable.ga.id}") String gaId, + NamedParameterJdbcTemplate namedParameterJdbcTemplate) { + super(jdbcTemplate); + this.mId = mId; + this.gaId = gaId; + this.namedParameterJdbcTemplate = namedParameterJdbcTemplate; + this.restTemplate = new RestTemplate(); + } + + + /** + * Sends analyzed items statistics. 
+ */ + @Override + @Scheduled(cron = "${rp.environment.variable.ga.cron}") + @SchedulerLock(name = "defectUpdateStatisticsJob", lockAtMostFor = "24h") + @Transactional + public void execute() { + LOGGER.info("Start sending items defect update statistics"); + if (StringUtils.isEmpty(mId) || StringUtils.isEmpty(gaId)) { + LOGGER.info( + "Both 'mId' and 'id' environment variables should be provided in order to run the job 'defectUpdateStatisticsJob'"); + return; + } + + var now = Instant.now(); + var dateBefore = now.minus(1, ChronoUnit.DAYS) + .atOffset(ZoneOffset.UTC) + .toLocalDateTime(); + MapSqlParameterSource queryParams = new MapSqlParameterSource(); + queryParams.addValue(DATE_BEFORE, dateBefore); + + namedParameterJdbcTemplate.query(SELECT_STATISTICS_QUERY, queryParams, rs -> { + int autoAnalyzed = 0; + int userAnalyzed = 0; + int sentToAnalyze = 0; + String version; + boolean analyzerEnabled; + Set status = new HashSet<>(); + Set autoAnalysisState = new HashSet<>(); + + do { + var metadata = new JSONObject(rs.getString("metadata")) + .getJSONObject("metadata"); + + analyzerEnabled = metadata.optBoolean("analyzerEnabled"); + if (analyzerEnabled) { + autoAnalysisState.add(metadata.getBoolean("autoAnalysisOn") ? "on" : "off"); + } + + if (metadata.optInt("userAnalyzed") > 0) { + status.add("manually"); + } else { + status.add("automatically"); + } + + userAnalyzed += metadata.optInt("userAnalyzed"); + autoAnalyzed += metadata.optInt("analyzed"); + sentToAnalyze += metadata.optInt("userAnalyzed") + metadata.optInt("sentToAnalyze"); + version = metadata.getString("version"); + + } while (rs.next()); + + var instanceId = jdbcTemplate.queryForObject(SELECT_INSTANCE_ID_QUERY, String.class); + var params = new JSONObject(); + params.put("category", "analyzer"); + params.put("instanceID", instanceId); + params.put("timestamp", now.toEpochMilli()); + params.put("version", version); + params.put("type", analyzerEnabled ? 
"is_analyzer" : "not_analyzer"); + if (analyzerEnabled) { + params.put("number", autoAnalyzed + "#" + userAnalyzed + "#" + sentToAnalyze); + params.put("auto_analysis", String.join("#", autoAnalysisState)); + params.put("status", String.join("#", status)); + } + + var event = new JSONObject(); + event.put("name", "analyze_analyzer"); + event.put("params", params); + + JSONArray events = new JSONArray(); + events.put(event); + + JSONObject requestBody = new JSONObject(); + requestBody.put("client_id", + now.toEpochMilli() + "." + new SecureRandom().nextInt(100_000, 999_999)); + requestBody.put("events", events); + + sendRequest(requestBody); + + }); + } + + private void sendRequest(JSONObject requestBody) { + try { + if (LOGGER.isDebugEnabled()) { + LOGGER.debug("Sending statistics data: {}", requestBody); + } + + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + HttpEntity request = new HttpEntity<>(requestBody.toString(), headers); + + String url = String.format(GA_URL, mId, gaId); + + var response = restTemplate.exchange(url, POST, request, String.class); + if (response.getStatusCodeValue() != 204) { + LOGGER.error("Failed to send statistics: {}", response); + } + } catch (Exception e) { + LOGGER.error("Failed to send statistics", e); + } finally { + jdbcTemplate.execute(DELETE_STATISTICS_QUERY); + } + } + +} diff --git a/src/main/resources/application.yml b/src/main/resources/application.yml index 588149e..6c4d0a1 100644 --- a/src/main/resources/application.yml +++ b/src/main/resources/application.yml @@ -39,6 +39,10 @@ rp: project: ## 1 minute cron: '0 */1 * * * *' + ga: + id: ovxfTlz7QOeaZDPbroXZQA + mId: G-Z22WZS0E4E + cron: '0 0 */24 * * *' executor: pool: storage: