
Commit

Merge pull request #522 from egovernments/stock-transfer-parties-HLM-3372

Stock transfer parties hlm 3372
kavi-egov authored Oct 16, 2023
2 parents 70e7334 + c7f68e4 commit 9d058b2
Showing 104 changed files with 5,568 additions and 501 deletions.
61 changes: 61 additions & 0 deletions .github/workflows/codacy.yml
@@ -0,0 +1,61 @@
# This workflow uses actions that are not certified by GitHub.
# They are provided by a third-party and are governed by
# separate terms of service, privacy policy, and support
# documentation.

# This workflow checks out code, performs a Codacy security scan
# and integrates the results with the
# GitHub Advanced Security code scanning feature. For more information on
# the Codacy security scan action usage and parameters, see
# https://github.com/codacy/codacy-analysis-cli-action.
# For more information on Codacy Analysis CLI in general, see
# https://github.com/codacy/codacy-analysis-cli.

name: Codacy Security Scan

on:
push:
branches: [ "master", "master|hlm-[0-9]+.*", "dev" ]
pull_request:
# The branches below must be a subset of the branches above
branches: [ "master", "dev" ]
schedule:
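# Runs weekly: '30 13 * * 1' = 13:30 UTC every Monday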
- cron: '30 13 * * 1'

permissions:
contents: read

jobs:
codacy-security-scan:
permissions:
contents: read # for actions/checkout to fetch code
security-events: write # for github/codeql-action/upload-sarif to upload SARIF results
actions: read # only required for a private repository by github/codeql-action/upload-sarif to get the Action run status
name: Codacy Security Scan
runs-on: ubuntu-latest
steps:
# Checkout the repository to the GitHub Actions runner
- name: Checkout code
uses: actions/checkout@v3

# Execute Codacy Analysis CLI and generate a SARIF output with the security issues identified during the analysis
- name: Run Codacy Analysis CLI
uses: codacy/codacy-analysis-cli-action@d840f886c4bd4edc059706d09c6a1586111c540b
with:
# Check https://github.com/codacy/codacy-analysis-cli#project-token to get your project token from your Codacy repository
# You can also omit the token and run the tools that support default configurations
project-token: ${{ secrets.CODACY_PROJECT_TOKEN }}
verbose: true
output: results.sarif
format: sarif
# Adjust severity of non-security issues
gh-code-scanning-compat: true
# Force 0 exit code to allow SARIF file generation
# This hands control over PR rejection to the GitHub side
max-allowed-issues: 2147483647

# Upload the SARIF file generated in the previous step
- name: Upload SARIF results file
uses: github/codeql-action/upload-sarif@v2
with:
sarif_file: results.sarif
12 changes: 12 additions & 0 deletions build/build-config.yml
@@ -36,6 +36,13 @@ config:
dockerfile: "build/maven/Dockerfile"
- work-dir: "health-services/project/src/main/resources/db"
image-name: "project-db"
- name: "builds/health-campaign-services/health-services/referralmanagement"
build:
- work-dir: "health-services/referralmanagement"
image-name: "referralmanagement"
dockerfile: "build/maven/Dockerfile"
- work-dir: "health-services/referralmanagement/src/main/resources/db"
image-name: "referralmanagement-db"
- name: "builds/health-campaign-services/health-services/household"
build:
- work-dir: "health-services/household"
@@ -57,6 +64,11 @@ config:
dockerfile: "build/maven/Dockerfile"
- work-dir: "health-services/household/src/main/resources/db"
image-name: "household-db"
- name: "builds/health-campaign-services/core-services/error-handler"
build:
- work-dir: "core-services/error-handler"
image-name: "error-handler"
dockerfile: "build/maven/Dockerfile"
- name: "builds/health-campaign-services/core-services/dashboard-analytics"
build:
- work-dir: "core-services/dashboard-analytics"
@@ -12,6 +12,7 @@
import com.tarento.analytics.dto.Plot;
import com.tarento.analytics.helper.ComputedFieldFactory;
import com.tarento.analytics.helper.IComputedField;
import com.tarento.analytics.helper.SortingHelper;
import com.tarento.analytics.model.ComputedFields;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -33,6 +34,8 @@ public class AdvanceTableChartResponseHandler implements IResponseHandler {

@Autowired
private ObjectMapper mapper;
@Autowired
private SortingHelper sortingHelper;

@Autowired
private ComputedFieldFactory computedFieldFactory;
@@ -116,6 +119,7 @@ public AggregateDto translate(AggregateRequestDto requestDto, ObjectNode aggrega
});

});
List<Data> finalDataList = dataList;
mappings.entrySet().stream().forEach(plotMap -> {
List<Plot> plotList = plotMap.getValue().values().stream().collect(Collectors.toList());
//filter out data object with all zero data.
@@ -146,7 +150,7 @@ public AggregateDto translate(AggregateRequestDto requestDto, ObjectNode aggrega
logger.error("execution of computed field :"+e.getMessage());
}
}
dataList.add(data);
finalDataList.add(data);
}

});
@@ -168,7 +172,11 @@ public AggregateDto translate(AggregateRequestDto requestDto, ObjectNode aggrega
});
}
}


if (chartNode.has("sort")) {
dataList = sortingHelper.tableSort(dataList, chartNode.get("sort").asText());
}

return getAggregatedDto(chartNode, dataList, requestDto.getVisualizationCode());

}
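The List<Data> finalDataList = dataList; alias introduced above exists because dataList is now reassigned further down (dataList = sortingHelper.tableSort(...)), so it is no longer effectively final and could not be captured inside the lambda. A standalone sketch of that Java rule, using placeholder names rather than the project's classes:

import java.util.ArrayList;
import java.util.List;

public class EffectivelyFinalSketch {
    public static void main(String[] args) {
        List<String> dataList = new ArrayList<>();
        // Alias the list once; the alias is never reassigned, so it stays effectively final
        // and may be captured by a lambda or method reference.
        List<String> finalDataList = dataList;
        List.of("b", "a").forEach(finalDataList::add);   // compiles
        // List.of("b", "a").forEach(dataList::add);     // would not compile: dataList is reassigned below
        dataList = List.copyOf(dataList);                // the reassignment is what breaks effective finality
        System.out.println(dataList);                    // [b, a]
    }
}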
@@ -85,8 +85,8 @@ public AggregateDto translate(AggregateRequestDto requestDto, ObjectNode aggrega

if(isPredictionEnabled ){
List<JsonNode> aggrNodes = aggregationNode.findValues(CHART_SPECIFIC);
startDate = aggrNodes.get(0).findValues(START_DATE).get(0).findValues("key").get(0).asLong();
endDate = aggrNodes.get(0).findValues(END_DATE).get(0).findValues("key").get(0).asLong();
startDate = (aggrNodes.get(0).findValues(START_DATE).get(0).findValues("key").get(0).asLong()/86400000)*86400000;
endDate = (aggrNodes.get(0).findValues(END_DATE).get(0).findValues("key").get(0).asLong()/86400000)*86400000;
interval=Constants.Interval.day.toString();
addTargetDates(startDate, endDate,targetEpochKeys);
}
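The divide-then-multiply by 86400000 above truncates the epoch-millisecond start and end dates to the beginning of their UTC day (86,400,000 ms = 24 hours), so prediction buckets align on whole-day boundaries. A minimal sketch of the arithmetic with an illustrative timestamp:

import java.time.Instant;

public class DayTruncationSketch {
    private static final long DAY_MS = 86_400_000L; // 24 * 60 * 60 * 1000

    public static void main(String[] args) {
        long ts = Instant.parse("2023-10-16T10:30:00Z").toEpochMilli(); // illustrative timestamp
        long dayStart = (ts / DAY_MS) * DAY_MS;                         // integer division drops the partial day
        System.out.println(Instant.ofEpochMilli(dayStart));             // 2023-10-16T00:00:00Z
    }
}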
@@ -117,14 +117,18 @@ public AggregateDto translate(AggregateRequestDto requestDto, ObjectNode aggrega
Set<String> finalBucketKeys = new LinkedHashSet<>();

// For multi aggr, find all plot keys first
enrichBucketKeys(aggrNodes, finalBucketKeys, interval);
// enrichBucketKeys(aggrNodes, finalBucketKeys, interval);
enrichBucketKeys(aggrNodes, finalBucketKeys, interval, startDate, isPredictionEnabled);
initializeMultiAggrPlotMap(multiAggrPlotMap, finalBucketKeys);

for(JsonNode aggrNode : aggrNodes) {
if (aggrNode.findValues(IResponseHandler.BUCKETS).size() > 0) {
ArrayNode buckets = (ArrayNode) aggrNode.findValues(IResponseHandler.BUCKETS).get(0);
for(JsonNode bucket : buckets){
JsonNode bkey = bucket.findValue(IResponseHandler.KEY);
if (isPredictionEnabled && Long.parseLong(bkey.asText()) < startDate) {
continue;
}
String key = getIntervalKey(bkey.asText(), Constants.Interval.valueOf(interval));
plotKeys.add(key);
if(isPredictionEnabled && !headerPath.equals(predictionPath)){
@@ -354,13 +358,16 @@ private void initializeMultiAggrPlotMap(Map<String, Double> multiAggrPlotMap, Se
});
}

private void enrichBucketKeys(List<JsonNode> aggrNodes, Set<String> finalBucketKeys, String interval) {
private void enrichBucketKeys(List<JsonNode> aggrNodes, Set<String> finalBucketKeys, String interval, Long startDate, Boolean isPredictionEnabled) {
List<String> bkeyList = new ArrayList<>();
for(JsonNode aggrNode : aggrNodes) {
if (aggrNode.findValues(IResponseHandler.BUCKETS).size() > 0) {
ArrayNode buckets = (ArrayNode) aggrNode.findValues(IResponseHandler.BUCKETS).get(0);
for(JsonNode bucket : buckets){
String bkey = bucket.findValue(IResponseHandler.KEY).asText();
if (isPredictionEnabled && Long.parseLong(bkey) < (startDate)) {
continue;
}
bkeyList.add(bkey);
}
}
@@ -59,7 +59,7 @@ public class MetricChartResponseHandler implements IResponseHandler{
@Override
public AggregateDto translate(AggregateRequestDto request, ObjectNode aggregations) throws IOException {
List<Data> dataList = new ArrayList<>();
String requestId = request.getRequestId();
String requestId = request.getRequestId();
String visualizationCode = request.getVisualizationCode();

JsonNode aggregationNode = aggregations.get(AGGREGATIONS);
@@ -203,9 +203,20 @@ public AggregateDto translate(AggregateRequestDto request, ObjectNode aggregatio
data.setPlots( Arrays.asList(latestDateplot,lastUpdatedTime));
request.getResponseRecorder().put(visualizationCode, request.getModuleLevel(), data);
dataList.add(data);
if(chartNode.get(POST_AGGREGATION_THEORY) != null) {
if(chartNode.get(POST_AGGREGATION_THEORY) != null) {
ComputeHelper computeHelper = computeHelperFactory.getInstance(chartNode.get(POST_AGGREGATION_THEORY).asText());
computeHelper.compute(request, dataList);
// computeHelper.compute(request, dataList);
List<Data> capDataList = new ArrayList<>();

if (chartNode.has(IS_CAPPED_BY_CAMPAIGN_PERIOD)) {
List<JsonNode> valueNode = aggregationNode.findValues(chartNode.get(IS_CAPPED_BY_CAMPAIGN_PERIOD).get(0).asText());
if(valueNode.size() > 0) {
Long val = valueNode.get(0).get(IResponseHandler.VALUE).asLong();
Data dataNode = new Data(aggrsPaths.get(0).asText(), val.doubleValue(), chartNode.get(IResponseHandler.VALUE_TYPE).asText());
capDataList.add(dataNode);
}
}
computeHelper.compute(request, dataList, capDataList);
}
}catch (Exception e){
logger.info("data chart name = "+chartName +" ex occurred "+e.getMessage());
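The campaign-period cap above is read with Jackson's JsonNode.findValues(name), which walks the whole tree and returns every value stored under a field with that name; the handler takes the first match and reads its value as a long. A small self-contained sketch of that lookup pattern (the JSON shape and field name are illustrative, not the real aggregation response):

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class FindValuesSketch {
    public static void main(String[] args) throws Exception {
        String json = "{\"aggregations\":{\"CAMPAIGN_PERIOD_CAP\":{\"value\":42},\"other\":{\"value\":7}}}";
        JsonNode root = new ObjectMapper().readTree(json);
        // findValues collects every descendant field named CAMPAIGN_PERIOD_CAP; take the first hit.
        long cap = root.findValues("CAMPAIGN_PERIOD_CAP").get(0).get("value").asLong();
        System.out.println(cap); // 42
    }
}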
@@ -1,6 +1,8 @@
package com.tarento.analytics.helper;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.tarento.analytics.dto.AggregateRequestDto;
import com.tarento.analytics.dto.Data;
import com.tarento.analytics.dto.Plot;
@@ -9,12 +11,14 @@
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;

import static com.tarento.analytics.handler.IResponseHandler.HIDE_HEADER_DENOMINATION;
import static com.tarento.analytics.handler.IResponseHandler.IS_CAPPED_BY_CAMPAIGN_PERIOD;

@Component
public class AdditiveComputedField implements IComputedField<Data> {
@@ -40,16 +44,31 @@ public void add(Data data, List<String> fields, String newField,JsonNode chartNo
Map<String, Plot> plotMap = data.getPlots().stream().collect(Collectors.toMap(Plot::getName, Function.identity()));

double total = 0.0;
double capTotal = 0.0;
for (String field: fields){
if(plotMap.containsKey(field)){
dataType = plotMap.get(field).getSymbol();
if(chartNode.has(IS_CAPPED_BY_CAMPAIGN_PERIOD) && doesTextExistInArrayNode((ArrayNode) chartNode.get(IS_CAPPED_BY_CAMPAIGN_PERIOD), field)) continue;
total = total+ plotMap.get(field).getValue();
}
}
if(postAggrTheoryName != null && !postAggrTheoryName.isEmpty()) {
ComputeHelper computeHelper = computeHelperFactory.getInstance(postAggrTheoryName);
if (chartNode.has(IS_CAPPED_BY_CAMPAIGN_PERIOD)) {
List<String> commonStrings = new ArrayList<>();
chartNode.get(IS_CAPPED_BY_CAMPAIGN_PERIOD).forEach(
item -> {
if (fields.contains(item.asText())) {
commonStrings.add(item.asText());
}
}
);
if(commonStrings.size()>0) {
capTotal = commonStrings.stream().mapToDouble(commonString -> plotMap.get(commonString).getValue()).sum();
}
}

total = computeHelper.compute(aggregateRequestDto,total );
total = computeHelper.compute(aggregateRequestDto,total,capTotal );
}


@@ -62,5 +81,16 @@ public void add(Data data, List<String> fields, String newField,JsonNode chartNo
}

}
private static boolean doesTextExistInArrayNode(ArrayNode arrayNode, String searchText) {
for (JsonNode element : arrayNode) {
if (element.isTextual()) {
String text = element.asText();
if (text.equals(searchText)) {
return true;
}
}
}
return false;
}
}
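As a design note, the doesTextExistInArrayNode loop added above is equivalent to a single stream expression, since ArrayNode is Iterable over its elements; a sketch of that alternative form:

import java.util.stream.StreamSupport;

import com.fasterxml.jackson.databind.node.ArrayNode;

class ArrayNodeSearch {
    // Same behaviour as the loop version: true if any textual element equals searchText.
    static boolean doesTextExistInArrayNode(ArrayNode arrayNode, String searchText) {
        return StreamSupport.stream(arrayNode.spliterator(), false)
                .anyMatch(element -> element.isTextual() && element.asText().equals(searchText));
    }
}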

@@ -12,8 +12,9 @@
*
*/
public interface ComputeHelper {

public List<Data> compute(AggregateRequestDto request, List<Data> data);
public List<Data> compute(AggregateRequestDto request, List<Data> data, List<Data> capValues);
public Double compute(AggregateRequestDto request, double value);
public Double compute(AggregateRequestDto request, double value, double capTotal);

}
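The interface now receives the campaign-period cap alongside the data, but the concrete helpers that implement it are not among the files shown here. Purely as an illustration of how an implementor could satisfy the extended contract — assuming simple "never exceed the cap" semantics, which may differ from the repository's actual post-aggregation helpers — a minimal sketch using only the constructors and getters visible elsewhere in this diff:

import java.util.List;

import com.tarento.analytics.dto.AggregateRequestDto;
import com.tarento.analytics.dto.Data;
import com.tarento.analytics.helper.ComputeHelper;

// Hypothetical implementor; class name and capping semantics are assumptions, not part of this commit.
public class CappedComputeHelperSketch implements ComputeHelper {

    @Override
    public List<Data> compute(AggregateRequestDto request, List<Data> data, List<Data> capValues) {
        double capTotal = capValues.stream().mapToDouble(d -> (Double) d.getHeaderValue()).sum();
        for (int i = 0; i < data.size(); i++) {
            Data d = data.get(i);
            // Replace each row in place with a capped copy (value-type string omitted in this sketch).
            data.set(i, new Data(d.getHeaderName(), compute(request, (Double) d.getHeaderValue(), capTotal), null));
        }
        return data;
    }

    @Override
    public Double compute(AggregateRequestDto request, double value) {
        return value; // no cap supplied
    }

    @Override
    public Double compute(AggregateRequestDto request, double value, double capTotal) {
        return capTotal > 0 ? Math.min(value, capTotal) : value;
    }
}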
@@ -15,6 +15,7 @@
import java.util.List;

import static com.tarento.analytics.constant.Constants.PostAggregationTheories.RESPONSE_DIFF_DATES;
import static com.tarento.analytics.handler.IResponseHandler.IS_CAPPED_BY_CAMPAIGN_PERIOD;

@Component
public class NoOpsComputedField implements IComputedField<ObjectNode>{
@@ -37,6 +38,7 @@ public void set(AggregateRequestDto requestDto, String postAggrTheoryName){
public void add(ObjectNode data, List<String> fields, String newField, JsonNode chartNode ) {
ObjectNode noOpsNode = JsonNodeFactory.instance.objectNode();
List<Data> dataList = new ArrayList<>();
List<Data> capDataList = new ArrayList<>();
try {
List<JsonNode> values = data.findValues(fields.get(0));
if (postAggrTheoryName.equalsIgnoreCase(RESPONSE_DIFF_DATES)) {
@@ -45,8 +47,14 @@ public void add(ObjectNode data, List<String> fields, String newField, JsonNode
Data dataNode = new Data(fields.get(0), val.doubleValue(), chartNode.get(IResponseHandler.VALUE_TYPE).asText());
dataList.add(dataNode);
}

if (chartNode.has(IS_CAPPED_BY_CAMPAIGN_PERIOD) && data.has((chartNode.get(IS_CAPPED_BY_CAMPAIGN_PERIOD).get(0).asText()))) {
Long capValue = data.findValues(chartNode.get(IS_CAPPED_BY_CAMPAIGN_PERIOD).get(0).asText()).get(0).get(IResponseHandler.VALUE).asLong();
Data dataNode = new Data(fields.get(0), capValue.doubleValue(), chartNode.get(IResponseHandler.VALUE_TYPE).asText());
capDataList.add(dataNode);
}
ComputeHelper computeHelper = computeHelperFactory.getInstance(RESPONSE_DIFF_DATES);
List<Data> computedData = computeHelper.compute(aggregateRequestDto, dataList);
List<Data> computedData = computeHelper.compute(aggregateRequestDto, dataList, capDataList);
noOpsNode.put(IResponseHandler.VALUE, ((Double) computedData.get(0).getHeaderValue()).longValue());
data.set(newField, noOpsNode);
}
@@ -2,7 +2,6 @@
import org.springframework.stereotype.Component;
import com.tarento.analytics.dto.Data;
import com.tarento.analytics.dto.Plot;
import java.util.*;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
@@ -21,6 +20,11 @@ public List<Data> sort(String sortingKey, List<Data> dataList) {
}
return dataList;
}
public List<Data> tableSort(List<Data> dataList, String sortingKey) {
Comparator<Data> tableSortComparator = tableSortComparator(sortingKey);
dataList.sort(tableSortComparator);
return dataList;
}

private static Comparator<Plot> plotSortComparator(String sortingKey, Boolean isValueSortingApplicable) {
return new Comparator<Plot>() {
@@ -48,4 +52,18 @@ public int compare(Plot p1, Plot p2) {
};
}

private static Comparator<Data> tableSortComparator(String sortingKey) {
return (p1, p2) -> {
String plotName1 = p1.getHeaderName().toUpperCase();
String plotName2 = p2.getHeaderName().toUpperCase();

if (sortingKey.equals(SORT_KEY_ASC)) {
return plotName1.compareTo(plotName2);
} else if (sortingKey.equals(SORT_KEY_DESC)) {
return plotName2.compareTo(plotName1);
}
return 0;
};
}

}
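A short usage sketch of the new tableSort. In the dashboard service the helper is Spring-injected (as in AdvanceTableChartResponseHandler above); it is constructed directly here only to keep the example self-contained, and the literal sort key is an assumption since the diff shows the SORT_KEY_ASC/SORT_KEY_DESC constants but not their values:

import java.util.ArrayList;
import java.util.List;

import com.tarento.analytics.dto.Data;
import com.tarento.analytics.helper.SortingHelper;

public class TableSortSketch {
    public static void main(String[] args) {
        List<Data> rows = new ArrayList<>();
        rows.add(new Data("Ward B", 10.0, "number")); // Data(headerName, headerValue, valueType) as used elsewhere in this diff
        rows.add(new Data("Ward A", 25.0, "number"));

        // "asc" is a guess at the value behind SORT_KEY_ASC; an unrecognised key leaves the order unchanged.
        new SortingHelper().tableSort(rows, "asc");
        rows.forEach(r -> System.out.println(r.getHeaderName())); // Ward A, then Ward B when the key matches
    }
}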