Create Dockerfiles, create docker-compose

Miroslav Vasilev 2024-02-05 19:27:58 +02:00
parent b2cee1d1c2
commit 932bd923d7
59 changed files with 3739 additions and 186 deletions

.idea/kotlinc.xml (generated)

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="KotlinJpsPluginSettings">
<option name="version" value="1.9.21" />
</component>
</project>

@@ -0,0 +1,112 @@
version: '3.4'
services:
api-gateway:
build: ./pefi-api-gateway
ports:
- '8080:8080'
environment:
PROFILE: development
AUTHENTIK_CLIENT_ID: r72Ja9IIGBSoKpBsYTuJ2yBZMmJnXcWnLdW3Sgpp
AUTHENTIK_CLIENT_SECRET: LhhuUZlQPFPzGGEuxDhvlyBtten0LufRHx8I5ZH63031yHk7UdUboCR2WgNA4aSpmmFOz6TfkgpYHy1eh3jWeWUGpisPZxZ2PCJlSkJBtoF54MDh1iBZZSQ1gcD6r69H
AUTHENTIK_ISSUER_URL: https://auth.mvvasilev.dev/application/o/personal-finances/
AUTHENTIK_BACK_CHANNEL_LOGOUT_URL: https://auth.mvvasilev.dev/application/o/personal-finances/end-session/
GATEWAY_URI: http://localhost:8080
CORE_API_URI: http://core-api:8081
STATEMENTS_API_URI: http://statements-api:8081
WIDGETS_API_URI: http://widgets-api:8081
FRONTEND_URI: http://frontend:5173
REDIS_HOST: redis
REDIS_PORT: 6379
SSL_ENABLED: 'true'
SSL_KEY_STORE_TYPE: PKCS12
SSL_KEY_STORE: classpath:keystore/local.p12
SSL_KEY_STORE_PASSWORD: asdf1234
SSL_KEY_ALIAS: local
frontend:
build: ./pefi-frontend
ports:
- '5173:5173'
core-api:
build: ./pefi-core-api
ports:
- '8081:8081'
environment:
PROFILE: 'development'
AUTHENTIK_ISSUER_URL: 'https://auth.mvvasilev.dev/application/o/personal-finances/'
DATASOURCE_URL: 'jdbc:postgresql://database:5432/finances'
DATASOURCE_USER: 'postgres'
DATASOURCE_PASSWORD: 'postgres'
KAFKA_SERVERS: 'broker:29092'
statements-api:
build: ./pefi-statements-api
ports:
- '8082:8081'
environment:
PROFILE: 'development'
AUTHENTIK_ISSUER_URL: 'https://auth.mvvasilev.dev/application/o/personal-finances/'
DATASOURCE_URL: 'jdbc:postgresql://database:5432/finances'
DATASOURCE_USER: 'postgres'
DATASOURCE_PASSWORD: 'postgres'
KAFKA_SERVERS: 'broker:29092'
widgets-api:
build: ./pefi-widgets-api
ports:
- '8083:8081'
environment:
PROFILE: 'development'
AUTHENTIK_ISSUER_URL: 'https://auth.mvvasilev.dev/application/o/personal-finances/'
DATASOURCE_URL: 'jdbc:postgresql://database:5432/finances'
DATASOURCE_USER: 'postgres'
DATASOURCE_PASSWORD: 'postgres'
redis:
image: redis/redis-stack:latest
ports:
- '6379:6379'
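      # container port 8001 is the RedisInsight web UI that ships with redis-stack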
- '6380:8001'
database:
image: postgres:16.1-alpine
ports:
- '5432:5432'
environment:
POSTGRES_DB: 'finances'
POSTGRES_USER: 'postgres'
POSTGRES_PASSWORD: 'postgres'
kafka-broker:
image: confluentinc/cp-kafka:7.5.3
hostname: broker
container_name: broker
depends_on:
- zookeeper
ports:
- "29092:29092"
- "9092:9092"
- "9101:9101"
environment:
KAFKA_BROKER_ID: 1
KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181'
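      # Dual listeners: containers on the compose network reach the broker at broker:29092,
      # while clients on the host machine connect via localhost:9092.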
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://broker:29092,PLAINTEXT_HOST://localhost:9092
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
KAFKA_JMX_PORT: 9101
KAFKA_JMX_HOSTNAME: localhost
zookeeper:
image: confluentinc/cp-zookeeper:7.5.3
hostname: zookeeper
container_name: zookeeper
ports:
- "2181:2181"
environment:
ZOOKEEPER_CLIENT_PORT: 2181
ZOOKEEPER_TICK_TIME: 2000

@@ -6,7 +6,9 @@ AUTHENTIK_ISSUER_URL= authentik issuer url ( dev: https://auth.mvvasilev.dev/app
AUTHENTIK_BACK_CHANNEL_LOGOUT_URL= authentik back channel logout url ( dev: https://auth.mvvasilev.dev/application/o/personal-finances/end-session/ )
GATEWAY_URI= http://localhost:8080
API_URI= http://localhost:8081
CORE_API_URI= http://localhost:8081
STATEMENTS_API_URI= http://localhost:8082
WIDGETS_API_URI= http://localhost:8083
FRONTEND_URI= http://localhost:5173
SSL_ENABLED= true if an ssl cert has been generated ( keytool -genkeypair -alias local -keyalg RSA -keysize 2048 -storetype PKCS12 -keystore local.p12 -validity 3650 )

@@ -0,0 +1,7 @@
FROM eclipse-temurin:21-jdk-alpine
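# Assumes the fat jar has already been built on the host (e.g. via the Gradle bootJar task)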
COPY ./build/libs/pefi-api-gateway-0.0.1-SNAPSHOT.jar app.jar
EXPOSE 8080
ENTRYPOINT exec java $JAVA_OPTS -jar /app.jar $ARGS

@@ -0,0 +1,33 @@
spring:
cloud:
gateway:
routes:
- id: core-api
uri: ${CORE_API_URI}
order: 3
predicates:
- Path=/api/**
filters:
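          # strip the /api prefix, then relay the caller's OAuth2 access token downstream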
- RewritePath=/api/(?<segment>.*), /$\{segment}
- TokenRelay=
- id: statements-api
uri: ${STATEMENTS_API_URI}
order: 1
predicates:
- Path=/api/statements/**
filters:
- RewritePath=/api/(?<segment>.*), /$\{segment}
- TokenRelay=
- id: widgets-api
uri: ${WIDGETS_API_URI}
order: 2
predicates:
- Path=/api/widgets/**
filters:
- RewritePath=/api/(?<segment>.*), /$\{segment}
- TokenRelay=
- id: spa
order: 4
uri: ${FRONTEND_URI}
predicates:
- Path=/**
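
For reference, the same routing can be expressed through Spring Cloud Gateway's Java DSL. A minimal sketch of the statements route under the assumption that STATEMENTS_API_URI resolves as a property; the TokenRelay filter is left to the YAML configuration, and the class name is hypothetical:

import org.springframework.beans.factory.annotation.Value;
import org.springframework.cloud.gateway.route.RouteLocator;
import org.springframework.cloud.gateway.route.builder.RouteLocatorBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class RouteSketchConfiguration {

    // Hypothetical Java-DSL equivalent of the statements-api route above.
    @Bean
    public RouteLocator statementsRoute(RouteLocatorBuilder builder,
                                        @Value("${STATEMENTS_API_URI}") String statementsUri) {
        return builder.routes()
                .route("statements-api", r -> r
                        .path("/api/statements/**")
                        // same rewrite as the YAML filter: /api/statements/x -> /statements/x
                        .filters(f -> f.rewritePath("/api/(?<segment>.*)", "/${segment}"))
                        .uri(statementsUri))
                .build();
    }
}

Note that more specific paths must carry lower order values so they match ahead of the catch-all /api/** route.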

@@ -26,19 +26,31 @@ spring:
set-status:
original-status-header-name: Original-Status
routes:
- id: api
uri: ${API_URI}
- id: core-api
uri: ${CORE_API_URI}
order: 3
predicates:
- Path=/api/**
filters:
- RewritePath=/api/(?<segment>.*), /$\{segment}
- TokenRelay=
- id: spa
order: 10
uri: ${FRONTEND_URI}
- id: statements-api
uri: ${STATEMENTS_API_URI}
order: 1
predicates:
- Path=/**
- Path=/api/statements/**
filters:
- RewritePath=/api/(?<segment>.*), /$\{segment}
- TokenRelay=
- id: widgets-api
uri: ${WIDGETS_API_URI}
order: 2
predicates:
- Path=/api/widgets/**
filters:
- RewritePath=/api/(?<segment>.*), /$\{segment}
- TokenRelay=
server:
ssl:
enabled: ${SSL_ENABLED}

@@ -2,7 +2,7 @@ package dev.mvvasilev.common.dto;
import java.time.LocalDateTime;
public class CreateProcessedTransactionDTO {
public class KafkaProcessedTransactionDTO {
private String description;
@@ -16,7 +16,7 @@ public class CreateProcessedTransactionDTO {
private Long statementId;
public CreateProcessedTransactionDTO() {
public KafkaProcessedTransactionDTO() {
}
public String getDescription() {

@@ -0,0 +1,10 @@
package dev.mvvasilev.common.dto;
import java.util.List;
public record KafkaReplaceProcessedTransactionsDTO(
Long statementId,
Integer userId,
List<KafkaProcessedTransactionDTO> transactions
) {
}

@@ -2,6 +2,8 @@ PROFILE= production/development
AUTHENTIK_ISSUER_URL= auth server configuration url for fetching JWKs ( dev: https://auth.mvvasilev.dev/application/o/personal-finances/ )
KAFKA_SERVERS= comma-delimited list of kafka servers to connect to
DATASOURCE_URL= database jdbc url ( postgres only, example: jdbc:postgresql://localhost:5432/mydatabase )
DATASOURCE_USER= database user
DATASOURCE_PASSWORD= database password

@@ -0,0 +1,7 @@
FROM eclipse-temurin:21-jdk-alpine
COPY ./build/libs/pefi-core-api-0.0.1-SNAPSHOT.jar app.jar
EXPOSE 8081
ENTRYPOINT exec java $JAVA_OPTS -jar /app.jar $ARGS

@@ -26,12 +26,15 @@ dependencies {
implementation 'org.springframework.boot:spring-boot-starter-oauth2-resource-server'
implementation 'org.springframework.boot:spring-boot-starter-validation'
implementation 'org.springframework.boot:spring-boot-starter-web'
implementation 'org.springframework.kafka:spring-kafka'
implementation 'org.flywaydb:flyway-core'
implementation 'org.apache.commons:commons-lang3:3.14.0'
implementation project(":pefi-common")
testImplementation 'org.springframework.kafka:spring-kafka-test'
runtimeOnly 'org.postgresql:postgresql'
testImplementation platform('org.junit:junit-bom:5.9.1')

@@ -0,0 +1,50 @@
package dev.mvvasilev.finances.configuration;
import dev.mvvasilev.common.dto.KafkaReplaceProcessedTransactionsDTO;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.support.serializer.JsonDeserializer;
import java.util.Map;
@Configuration
public class KafkaConfiguration {
public static final String REPLACE_TRANSACTIONS_TOPIC = "pefi.transactions.replace";
@Value(value = "${spring.kafka.bootstrap-servers}")
private String bootstrapAddress;
@Bean
public ConsumerFactory<String, KafkaReplaceProcessedTransactionsDTO> replaceTransactionsConsumerFactory() {
// ...
return new DefaultKafkaConsumerFactory<>(
Map.of(
        // consumer-side settings; the original mistakenly used producer serializer keys here
        ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress
),
new StringDeserializer(),
new JsonDeserializer<>(KafkaReplaceProcessedTransactionsDTO.class)
);
}
@Bean
public ConcurrentKafkaListenerContainerFactory<String, KafkaReplaceProcessedTransactionsDTO> replaceTransactionsKafkaListenerContainerFactory(
ConsumerFactory<String, KafkaReplaceProcessedTransactionsDTO> replaceTransactionsConsumerFactory
) {
ConcurrentKafkaListenerContainerFactory<String, KafkaReplaceProcessedTransactionsDTO> factory = new ConcurrentKafkaListenerContainerFactory<>();
factory.setConsumerFactory(replaceTransactionsConsumerFactory);
return factory;
}
}

@@ -0,0 +1,28 @@
package dev.mvvasilev.finances.controllers;
import dev.mvvasilev.common.dto.KafkaReplaceProcessedTransactionsDTO;
import dev.mvvasilev.finances.configuration.KafkaConfiguration;
import dev.mvvasilev.finances.services.ProcessedTransactionService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;
@Component
public class TransactionsKafkaListener {
private final ProcessedTransactionService service;
@Autowired
public TransactionsKafkaListener(ProcessedTransactionService service) {
this.service = service;
}
@KafkaListener(
topics = KafkaConfiguration.REPLACE_TRANSACTIONS_TOPIC,
containerFactory = "replaceTransactionsKafkaListenerContainerFactory"
)
public void replaceTransactionsListener(KafkaReplaceProcessedTransactionsDTO message) {
service.createOrReplaceProcessedTransactions(message.statementId(), message.userId(), message.transactions());
}
}

@@ -1,5 +1,6 @@
package dev.mvvasilev.finances.dtos;
import dev.mvvasilev.common.enums.ProcessedTransactionField;
import dev.mvvasilev.finances.enums.CategorizationRule;
import jakarta.validation.Valid;
import jakarta.validation.constraints.NotNull;

@@ -2,6 +2,7 @@ package dev.mvvasilev.finances.entity;
import dev.mvvasilev.common.data.AbstractEntity;
import dev.mvvasilev.common.data.UserOwned;
import dev.mvvasilev.common.enums.ProcessedTransactionField;
import dev.mvvasilev.finances.enums.CategorizationRule;
import jakarta.persistence.Convert;
import jakarta.persistence.Entity;

@@ -22,7 +22,7 @@ public interface ProcessedTransactionRepository extends JpaRepository<ProcessedT
Page<ProcessedTransaction> findAllByUserId(int userId, Pageable pageable);
@Query(value = "DELETE FROM transactions.processed_transaction WHERE statement_id = :statementId", nativeQuery = true)
@Query(value = "DELETE FROM transactions.processed_transaction WHERE statement_id = :statementId AND user_id = :userId", nativeQuery = true)
@Modifying
void deleteProcessedTransactionsForStatement(@Param("statementId") Long statementId);
void deleteProcessedTransactionsForStatement(@Param("statementId") Long statementId, @Param("userId") Integer userId);
}

@@ -1,5 +1,6 @@
package dev.mvvasilev.finances.services;
import dev.mvvasilev.common.dto.KafkaProcessedTransactionDTO;
import dev.mvvasilev.finances.dtos.ProcessedTransactionDTO;
import dev.mvvasilev.finances.dtos.TransactionCategoryDTO;
import dev.mvvasilev.finances.entity.ProcessedTransaction;
@@ -10,6 +11,8 @@ import org.springframework.data.domain.*;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.List;
@Service
@Transactional
public class ProcessedTransactionService {
@@ -37,4 +40,25 @@ public class ProcessedTransactionService {
.toList()
));
}
public void createOrReplaceProcessedTransactions(Long statementId, Integer userId, List<KafkaProcessedTransactionDTO> transactions) {
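        // replace semantics: drop whatever was previously imported for this statement,
        // then persist the new batch; the class-level @Transactional keeps the swap atomic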
processedTransactionRepository.deleteProcessedTransactionsForStatement(statementId, userId);
var entities = transactions.stream()
.map(t -> {
var entity = new ProcessedTransaction();
entity.setUserId(userId);
entity.setStatementId(statementId);
entity.setInflow(t.isInflow());
entity.setTimestamp(t.getTimestamp());
entity.setAmount(t.getAmount());
entity.setDescription(t.getDescription());
return entity;
})
.toList();
processedTransactionRepository.saveAllAndFlush(entities);
}
}

@@ -11,12 +11,17 @@ spring.datasource.username=${DATASOURCE_USER}
spring.datasource.password=${DATASOURCE_PASSWORD}
spring.datasource.driver-class-name=org.postgresql.Driver
spring.kafka.bootstrap-servers=${KAFKA_SERVERS}
spring.jpa.properties.hibernate.jdbc.batch_size=10
spring.jpa.properties.hibernate.order_inserts=true
spring.jpa.generate-ddl=false
spring.jpa.show-sql=true
spring.jpa.hibernate.ddl-auto=validate
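# Each service keeps its own Flyway history table, so all services can share one database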
spring.flyway.table=core_schema_history
spring.flyway.baseline-version=0.9
spring.flyway.baseline-on-migrate=true
# Security
jwt.issuer-url=${AUTHENTIK_ISSUER_URL}

@@ -1,3 +1,5 @@
CREATE SCHEMA IF NOT EXISTS transactions;
CREATE TABLE IF NOT EXISTS transactions.processed_transaction
(
id BIGSERIAL,
@@ -9,6 +11,5 @@ CREATE TABLE IF NOT EXISTS transactions.processed_transaction
time_created TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
time_last_modified TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
statement_id BIGINT,
CONSTRAINT PK_processed_transaction PRIMARY KEY (id),
CONSTRAINT FK_processed_transaction_statement FOREIGN KEY (statement_id) REFERENCES transactions.raw_statement(id)
CONSTRAINT PK_processed_transaction PRIMARY KEY (id)
);

@@ -1,5 +1,6 @@
package dev.mvvasilev.finances;
import dev.mvvasilev.common.enums.ProcessedTransactionField;
import dev.mvvasilev.finances.entity.Categorization;
import dev.mvvasilev.finances.enums.CategorizationRule;
import org.apache.commons.lang3.RandomUtils;

pefi-frontend/Dockerfile

@@ -0,0 +1,11 @@
# This dockerfile is intended for development use only
FROM node:21 AS nodebuilder
WORKDIR /app
COPY . .
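# --frozen-lockfile fails the install if yarn.lock is out of sync with package.json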
RUN yarn install --prefer-offline --frozen-lockfile --non-interactive
CMD ["yarn", "dev", "--host"]

@@ -12,9 +12,5 @@ export default defineConfig({
},
optimizeDeps: {
include: ['@mui/material/Tooltip']
},
server: {
host: '127.0.0.1',
port: 5173,
}
})

pefi-frontend/yarn.lock

File diff suppressed because it is too large.

@@ -2,6 +2,8 @@ PROFILE= production/development
AUTHENTIK_ISSUER_URL= auth server configuration url for fetching JWKs ( dev: https://auth.mvvasilev.dev/application/o/personal-finances/ )
KAFKA_SERVERS= comma-delimited list of kafka servers to connect to
DATASOURCE_URL= database jdbc url ( postgres only, example: jdbc:postgresql://localhost:5432/mydatabase )
DATASOURCE_USER= database user
DATASOURCE_PASSWORD= database password

@@ -0,0 +1,7 @@
FROM eclipse-temurin:21-jdk-alpine
COPY ./build/libs/pefi-statements-api-0.0.1-SNAPSHOT.jar app.jar
EXPOSE 8081
ENTRYPOINT exec java $JAVA_OPTS -jar /app.jar $ARGS

@@ -17,6 +17,8 @@ dependencies {
implementation 'org.springframework.boot:spring-boot-starter-validation'
implementation 'org.springframework.boot:spring-boot-starter-web'
implementation 'org.springframework.kafka:spring-kafka'
implementation 'org.flywaydb:flyway-core'
implementation 'org.springdoc:springdoc-openapi-starter-common:2.3.0'
implementation 'org.springdoc:springdoc-openapi-starter-webmvc-ui:2.3.0'
@@ -25,10 +27,14 @@ dependencies {
implementation project(":pefi-common")
testImplementation 'org.springframework.kafka:spring-kafka-test'
runtimeOnly 'org.postgresql:postgresql'
testImplementation platform('org.junit:junit-bom:5.9.1')
testImplementation 'org.junit.jupiter:junit-jupiter'
testImplementation 'org.mockito:mockito-core:5.7.0'
implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk8"
}
test {

@@ -0,0 +1,56 @@
package dev.mvvasilev.statements.configuration;
import dev.mvvasilev.common.dto.KafkaProcessedTransactionDTO;
import dev.mvvasilev.common.dto.KafkaReplaceProcessedTransactionsDTO;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaAdmin;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
import org.springframework.kafka.support.serializer.JsonSerializer;
import java.util.Map;
@Configuration
public class KafkaConfiguration {
public static final String REPLACE_TRANSACTIONS_TOPIC = "pefi.transactions.replace";
@Value(value = "${spring.kafka.bootstrap-servers}")
private String bootstrapAddress;
@Bean
public KafkaAdmin kafkaAdmin() {
return new KafkaAdmin(Map.of(
AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress
));
}
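    // one partition with replication factor 1: fine for the single-broker dev stack, not for production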
@Bean
public NewTopic replaceTransactions() {
return new NewTopic(REPLACE_TRANSACTIONS_TOPIC, 1, (short) 1);
}
@Bean
public ProducerFactory<String, KafkaReplaceProcessedTransactionsDTO> replaceTransactionsProducerFactory() {
return new DefaultKafkaProducerFactory<>(Map.of(
ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress,
ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class,
ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class
));
}
@Bean
public KafkaTemplate<String, KafkaReplaceProcessedTransactionsDTO> replaceTransactionsKafkaTemplate(
ProducerFactory<String, KafkaReplaceProcessedTransactionsDTO> replaceTransactionsProducerFactory
) {
return new KafkaTemplate<>(replaceTransactionsProducerFactory);
}
}

@@ -7,6 +7,7 @@ import dev.mvvasilev.statements.dto.CreateTransactionMappingDTO;
import dev.mvvasilev.statements.dto.TransactionMappingDTO;
import dev.mvvasilev.statements.dto.TransactionValueGroupDTO;
import dev.mvvasilev.statements.dto.UploadedStatementDTO;
import dev.mvvasilev.statements.service.StatementParserService;
import dev.mvvasilev.statements.service.StatementsService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
@@ -25,9 +26,12 @@ public class StatementsController extends AbstractRestController {
final private StatementsService statementsService;
final private StatementParserService statementParserService;
@Autowired
public StatementsController(StatementsService statementsService) {
public StatementsController(StatementsService statementsService, StatementParserService statementParserService) {
this.statementsService = statementsService;
this.statementParserService = statementParserService;
}
@GetMapping
@@ -78,7 +82,7 @@ public class StatementsController extends AbstractRestController {
@PostMapping(value = "/uploadSheet", consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
public ResponseEntity<APIResponseDTO<Object>> uploadStatement(@RequestParam("file") MultipartFile file, Authentication authentication) throws IOException {
statementsService.uploadStatementFromExcelSheetForUser(
statementParserService.uploadStatementFromExcelSheetForUser(
file.getOriginalFilename(),
file.getContentType(),
file.getInputStream(),

@@ -0,0 +1,252 @@
package dev.mvvasilev.statements.service;
import dev.mvvasilev.common.enums.RawTransactionValueType;
import dev.mvvasilev.statements.entity.RawStatement;
import dev.mvvasilev.statements.entity.RawTransactionValue;
import dev.mvvasilev.statements.entity.RawTransactionValueGroup;
import dev.mvvasilev.statements.persistence.RawStatementRepository;
import dev.mvvasilev.statements.persistence.RawTransactionValueGroupRepository;
import dev.mvvasilev.statements.persistence.RawTransactionValueRepository;
import dev.mvvasilev.statements.service.dtos.ParsedStatementDTO;
import org.apache.poi.ss.usermodel.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.io.IOException;
import java.io.InputStream;
import java.time.LocalDateTime;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeFormatterBuilder;
import java.time.format.DateTimeParseException;
import java.time.format.ResolverStyle;
import java.time.temporal.ChronoField;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.stream.IntStream;
import static dev.mvvasilev.common.enums.RawTransactionValueType.*;
@Service
public class StatementParserService {
private static final DateTimeFormatter DEFAULT_DATE_FORMAT = new DateTimeFormatterBuilder()
.appendPattern("dd.MM.yyyy[ [HH][:mm][:ss]]")
.parseDefaulting(ChronoField.HOUR_OF_DAY, 0)
.parseDefaulting(ChronoField.MINUTE_OF_HOUR, 0)
.parseDefaulting(ChronoField.SECOND_OF_MINUTE, 0)
.toFormatter()
.withResolverStyle(ResolverStyle.LENIENT);
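    // e.g. "05.02.2024" parses to 2024-02-05T00:00 (time-of-day filled in by the
    // parseDefaulting calls above), while "05.02.2024 19:27:58" keeps its time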
private final RawStatementRepository rawStatementRepository;
private final RawTransactionValueGroupRepository rawTransactionValueGroupRepository;
private final RawTransactionValueRepository rawTransactionValueRepository;
@Autowired
public StatementParserService(RawStatementRepository rawStatementRepository, RawTransactionValueGroupRepository rawTransactionValueGroupRepository, RawTransactionValueRepository rawTransactionValueRepository) {
this.rawStatementRepository = rawStatementRepository;
this.rawTransactionValueGroupRepository = rawTransactionValueGroupRepository;
this.rawTransactionValueRepository = rawTransactionValueRepository;
}
public void uploadStatementFromExcelSheetForUser(final String fileName, final String mimeType, final InputStream workbookInputStream, final int userId) throws IOException {
var workbook = WorkbookFactory.create(workbookInputStream);
var firstWorksheet = workbook.getSheetAt(0);
parseSheet(firstWorksheet, userId, fileName);
}
protected ParsedStatementDTO parseSheet(Sheet sheet, final int userId, final String fileName) {
var lastRowIndex = sheet.getLastRowNum();
var statement = new RawStatement();
statement.setUserId(userId);
statement.setName(fileName);
statement = rawStatementRepository.saveAndFlush(statement);
var firstRow = sheet.getRow(0);
List<RawTransactionValueGroup> valueGroups = new ArrayList<>();
// turn each column into a value group
for (var c : firstRow) {
if (c == null || c.getCellType() == CellType.BLANK) {
break;
}
var valueGroup = parseValueGroup(c.getStringCellValue(), sheet, c.getRowIndex(), lastRowIndex, c.getColumnIndex());
valueGroup.setStatementId(statement.getId());
valueGroups.add(valueGroup);
}
valueGroups = rawTransactionValueGroupRepository.saveAllAndFlush(valueGroups);
var column = 0;
List<RawTransactionValue> allValues = new ArrayList<>();
// turn each cell in each row into a value, related to the value group ( column )
for (var group : valueGroups) {
var values = parseValuesForColumn(group, sheet, column, lastRowIndex);
allValues.addAll(values);
column++;
}
allValues = rawTransactionValueRepository.saveAllAndFlush(allValues);
return new ParsedStatementDTO(
statement,
valueGroups,
allValues
);
}
protected RawTransactionValueGroup parseValueGroup(String name, Sheet worksheet, int rowIndex, int lastRowIndex, int columnIndex) {
var transactionValueGroup = new RawTransactionValueGroup();
transactionValueGroup.setName(name);
// group type defaults to STRING if no other type can be determined
var groupType = STRING;
// iterate down through the rows on this column, looking for the first one to return a type
for (int y = rowIndex + 1; y <= lastRowIndex; y++) {
var typeResult = determineGroupType(worksheet, y, columnIndex);
// if a type has been determined, stop here
if (typeResult.isPresent()) {
groupType = typeResult.get();
break;
}
}
transactionValueGroup.setType(groupType);
return transactionValueGroup;
}
protected List<RawTransactionValue> parseValuesForColumn(RawTransactionValueGroup group, Sheet worksheet, int x, int lastRowIndex) {
return IntStream.range(1, lastRowIndex + 1).mapToObj(y -> parseValueFromCell(group, worksheet, x, y)).toList();
}
protected RawTransactionValue parseValueFromCell(RawTransactionValueGroup group, Sheet worksheet, int x, int y) {
var value = new RawTransactionValue();
value.setGroupId(group.getId());
value.setRowIndex(y);
var cell = worksheet.getRow(y).getCell(x);
if (cell.getCellType() == CellType.STRING) {
var cellValue = cell.getStringCellValue().trim();
try {
switch (group.getType()) {
case STRING -> value.setStringValue(cellValue);
case NUMERIC -> value.setNumericValue(Double.parseDouble(cellValue));
case TIMESTAMP -> value.setTimestampValue(LocalDateTime.parse(cellValue, DEFAULT_DATE_FORMAT));
case BOOLEAN -> value.setBooleanValue(Boolean.parseBoolean(cellValue));
}
} catch (Exception e) {
switch (group.getType()) {
case STRING -> value.setStringValue("");
case NUMERIC -> value.setNumericValue(0.0);
case TIMESTAMP -> value.setTimestampValue(LocalDateTime.ofEpochSecond(0, 0, ZoneOffset.UTC));
case BOOLEAN -> value.setBooleanValue(false);
}
}
return value;
}
if (cell.getCellType() == CellType.BOOLEAN) {
var cellValue = worksheet.getRow(y).getCell(x).getBooleanCellValue();
switch (group.getType()) {
case STRING -> value.setStringValue(Boolean.toString(cellValue));
case NUMERIC -> value.setNumericValue(0.0);
case TIMESTAMP -> value.setTimestampValue(LocalDateTime.ofEpochSecond(0, 0, ZoneOffset.UTC));
case BOOLEAN -> value.setBooleanValue(cellValue);
}
return value;
}
if (DateUtil.isCellDateFormatted(cell)) {
var cellValue = cell.getLocalDateTimeCellValue();
switch (group.getType()) {
case STRING -> value.setStringValue("");
case NUMERIC -> value.setNumericValue(0.0);
case TIMESTAMP -> value.setTimestampValue(cellValue);
case BOOLEAN -> value.setBooleanValue(false);
}
return value;
}
var cellValue = cell.getNumericCellValue();
switch (group.getType()) {
case STRING -> value.setStringValue(Double.toString(cellValue));
case NUMERIC -> value.setNumericValue(cellValue);
case TIMESTAMP -> value.setTimestampValue(LocalDateTime.ofEpochSecond(0, 0, ZoneOffset.UTC));
case BOOLEAN -> value.setBooleanValue(false);
}
return value;
}
protected Optional<RawTransactionValueType> determineGroupType(final Sheet worksheet, final int rowIndex, final int columnIndex) {
var cell = worksheet.getRow(rowIndex).getCell(columnIndex);
if (cell == null || cell.getCellType() == CellType.BLANK) {
return Optional.empty();
}
if (cell.getCellType() == CellType.BOOLEAN) {
return Optional.of(BOOLEAN);
}
if (cell.getCellType() == CellType.STRING) {
return Optional.of(STRING);
}
if (DateUtil.isCellDateFormatted(cell)) {
return Optional.of(TIMESTAMP);
}
if (cell.getCellType() == CellType.NUMERIC) {
return Optional.of(NUMERIC);
}
var cellValue = cell.getStringCellValue();
if (isValidDate(cellValue, DEFAULT_DATE_FORMAT)) {
return Optional.of(RawTransactionValueType.TIMESTAMP);
}
return Optional.empty();
}
protected boolean isValidDate(String stringDate, DateTimeFormatter formatter) {
try {
formatter.parse(stringDate);
} catch (DateTimeParseException e) {
return false;
}
return true;
}
}

@@ -1,37 +1,30 @@
package dev.mvvasilev.statements.service;
import dev.mvvasilev.common.dto.CreateProcessedTransactionDTO;
import dev.mvvasilev.common.dto.KafkaProcessedTransactionDTO;
import dev.mvvasilev.common.dto.KafkaReplaceProcessedTransactionsDTO;
import dev.mvvasilev.common.dto.ProcessedTransactionFieldDTO;
import dev.mvvasilev.common.enums.ProcessedTransactionField;
import dev.mvvasilev.common.enums.RawTransactionValueType;
import dev.mvvasilev.common.exceptions.CommonFinancesException;
import dev.mvvasilev.common.web.CrudResponseDTO;
import dev.mvvasilev.statements.configuration.KafkaConfiguration;
import dev.mvvasilev.statements.dto.*;
import dev.mvvasilev.statements.entity.RawStatement;
import dev.mvvasilev.statements.entity.RawTransactionValue;
import dev.mvvasilev.statements.entity.RawTransactionValueGroup;
import dev.mvvasilev.statements.entity.TransactionMapping;
import dev.mvvasilev.statements.enums.MappingConversionType;
import dev.mvvasilev.statements.persistence.RawStatementRepository;
import dev.mvvasilev.statements.persistence.RawTransactionValueGroupRepository;
import dev.mvvasilev.statements.persistence.RawTransactionValueRepository;
import dev.mvvasilev.statements.persistence.TransactionMappingRepository;
import org.apache.poi.ss.usermodel.CellType;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.ss.usermodel.WorkbookFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.io.IOException;
import java.io.InputStream;
import java.time.LocalDateTime;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeFormatterBuilder;
import java.time.format.DateTimeParseException;
import java.time.format.ResolverStyle;
import java.time.temporal.ChronoField;
import java.util.*;
@@ -40,20 +33,11 @@ import java.util.stream.Collectors;
import static dev.mvvasilev.common.enums.ProcessedTransactionField.*;
import static dev.mvvasilev.common.enums.ProcessedTransactionField.TIMESTAMP;
import static dev.mvvasilev.common.enums.RawTransactionValueType.*;
@Service
@Transactional
public class StatementsService {
private static final DateTimeFormatter DEFAULT_DATE_FORMAT = new DateTimeFormatterBuilder()
.appendPattern("dd.MM.yyyy[ [HH][:mm][:ss]]")
.parseDefaulting(ChronoField.HOUR_OF_DAY, 0)
.parseDefaulting(ChronoField.MINUTE_OF_HOUR, 0)
.parseDefaulting(ChronoField.SECOND_OF_MINUTE, 0)
.toFormatter()
.withResolverStyle(ResolverStyle.LENIENT);
private final Logger logger = LoggerFactory.getLogger(StatementsService.class);
private final RawStatementRepository rawStatementRepository;
@@ -64,153 +48,21 @@ public class StatementsService {
private final TransactionMappingRepository transactionMappingRepository;
// TODO: send processed transactions to be stored via message broker
// private final ProcessedTransactionRepository processedTransactionRepository;
private final KafkaTemplate<String, KafkaReplaceProcessedTransactionsDTO> replaceTransactionsKafkaTemplate;
@Autowired
public StatementsService(RawStatementRepository rawStatementRepository, RawTransactionValueGroupRepository rawTransactionValueGroupRepository, RawTransactionValueRepository rawTransactionValueRepository, TransactionMappingRepository transactionMappingRepository) {
public StatementsService(
RawStatementRepository rawStatementRepository,
RawTransactionValueGroupRepository rawTransactionValueGroupRepository,
RawTransactionValueRepository rawTransactionValueRepository,
TransactionMappingRepository transactionMappingRepository,
KafkaTemplate<String, KafkaReplaceProcessedTransactionsDTO> replaceTransactionsKafkaTemplate
) {
this.rawStatementRepository = rawStatementRepository;
this.rawTransactionValueGroupRepository = rawTransactionValueGroupRepository;
this.rawTransactionValueRepository = rawTransactionValueRepository;
this.transactionMappingRepository = transactionMappingRepository;
// this.processedTransactionRepository = processedTransactionRepository;
}
public void uploadStatementFromExcelSheetForUser(final String fileName, final String mimeType, final InputStream workbookInputStream, final int userId) throws IOException {
var workbook = WorkbookFactory.create(workbookInputStream);
var firstWorksheet = workbook.getSheetAt(0);
var lastRowIndex = firstWorksheet.getLastRowNum();
var statement = new RawStatement();
statement.setUserId(userId);
statement.setName(fileName);
statement = rawStatementRepository.saveAndFlush(statement);
var firstRow = firstWorksheet.getRow(0);
List<RawTransactionValueGroup> valueGroups = new ArrayList<>();
// turn each column into a value group
for (var c : firstRow) {
if (c == null || c.getCellType() == CellType.BLANK) {
break;
}
var transactionValueGroup = new RawTransactionValueGroup();
transactionValueGroup.setStatementId(statement.getId());
transactionValueGroup.setName(c.getStringCellValue());
// group type is string by default, if no other type could have been determined
var groupType = STRING;
// iterate down through the rows on this column, looking for the first one to return a type
for (int y = c.getRowIndex() + 1; y <= lastRowIndex; y++) {
var typeResult = determineGroupType(firstWorksheet, y, c.getColumnIndex());
// if a type has been determined, stop here
if (typeResult.isPresent()) {
groupType = typeResult.get();
break;
}
}
transactionValueGroup.setType(groupType);
valueGroups.add(transactionValueGroup);
}
valueGroups = rawTransactionValueGroupRepository.saveAllAndFlush(valueGroups);
var column = 0;
// turn each cell in each row into a value, related to the value group ( column )
for (var group : valueGroups) {
var groupType = group.getType();
var valueList = new ArrayList<RawTransactionValue>();
for (int y = 1; y < lastRowIndex; y++) {
var value = new RawTransactionValue();
value.setGroupId(group.getId());
value.setRowIndex(y);
try {
var cellValue = firstWorksheet.getRow(y).getCell(column).getStringCellValue().trim();
try {
switch (groupType) {
case STRING -> value.setStringValue(cellValue);
case NUMERIC -> value.setNumericValue(Double.parseDouble(cellValue));
case TIMESTAMP -> value.setTimestampValue(LocalDateTime.parse(cellValue, DEFAULT_DATE_FORMAT));
case BOOLEAN -> value.setBooleanValue(Boolean.parseBoolean(cellValue));
}
} catch (Exception e) {
switch (groupType) {
case STRING -> value.setStringValue("");
case NUMERIC -> value.setNumericValue(0.0);
case TIMESTAMP -> value.setTimestampValue(LocalDateTime.ofEpochSecond(0, 0, ZoneOffset.UTC));
case BOOLEAN -> value.setBooleanValue(false);
}
}
} catch (IllegalStateException e) {
// Cell was numeric
var cellValue = firstWorksheet.getRow(y).getCell(column).getNumericCellValue();
switch (groupType) {
case STRING -> value.setStringValue(Double.toString(cellValue));
case NUMERIC -> value.setNumericValue(cellValue);
case TIMESTAMP -> value.setTimestampValue(LocalDateTime.ofEpochSecond(0, 0, ZoneOffset.UTC));
case BOOLEAN -> value.setBooleanValue(false);
}
}
valueList.add(value);
}
rawTransactionValueRepository.saveAllAndFlush(valueList);
column++;
}
}
private Optional<RawTransactionValueType> determineGroupType(final Sheet worksheet, final int rowIndex, final int columnIndex) {
var cell = worksheet.getRow(rowIndex).getCell(columnIndex);
if (cell == null || cell.getCellType() == CellType.BLANK) {
return Optional.empty();
}
if (cell.getCellType() == CellType.BOOLEAN) {
return Optional.of(BOOLEAN);
}
if (cell.getCellType() == CellType.NUMERIC) {
return Optional.of(NUMERIC);
}
var cellValue = cell.getStringCellValue();
if (isValidDate(cellValue)) {
return Optional.of(RawTransactionValueType.TIMESTAMP);
}
return Optional.empty();
}
private boolean isValidDate(String stringDate) {
try {
DEFAULT_DATE_FORMAT.parse(stringDate);
} catch (DateTimeParseException e) {
return false;
}
return true;
this.replaceTransactionsKafkaTemplate = replaceTransactionsKafkaTemplate;
}
public Collection<UploadedStatementDTO> fetchStatementsForUser(final int userId) {
@@ -303,8 +155,11 @@ public class StatementsService {
})
.toList();
// TODO: Over kafka, delete previous transactions, and create the new ones
processedTransactionRepository.saveAllAndFlush(processedTransactions);
replaceTransactionsKafkaTemplate.send(KafkaConfiguration.REPLACE_TRANSACTIONS_TOPIC, new KafkaReplaceProcessedTransactionsDTO(
statementId,
userId,
processedTransactions
));
}
// This const is a result of the limitations of the JVM.
@@ -312,7 +167,7 @@ StatementsService {
// Because of this, it is very difficult to tie the ProcessedTransactionField values to the actual class fields they represent.
// To mitigate this, the const lives here, in plain view, so that when one of the fields is changed,
// the programmer hopefully remembers to update the corresponding entry as well.
private static final Map<ProcessedTransactionField, BiConsumer<CreateProcessedTransactionDTO, Object>> FIELD_SETTERS = Map.ofEntries(
private static final Map<ProcessedTransactionField, BiConsumer<KafkaProcessedTransactionDTO, Object>> FIELD_SETTERS = Map.ofEntries(
Map.entry(
DESCRIPTION,
(pt, value) -> pt.setDescription((String) value)
@@ -331,8 +186,8 @@ StatementsService {
)
);
private CreateProcessedTransactionDTO mapValuesToTransaction(List<RawTransactionValue> values, final Collection<TransactionMapping> mappings) {
final var processedTransaction = new CreateProcessedTransactionDTO();
private KafkaProcessedTransactionDTO mapValuesToTransaction(List<RawTransactionValue> values, final Collection<TransactionMapping> mappings) {
final var processedTransaction = new KafkaProcessedTransactionDTO();
values.forEach(value -> {
final var mapping = mappings.stream()

@@ -0,0 +1,14 @@
package dev.mvvasilev.statements.service.dtos;
import dev.mvvasilev.statements.entity.RawStatement;
import dev.mvvasilev.statements.entity.RawTransactionValue;
import dev.mvvasilev.statements.entity.RawTransactionValueGroup;
import java.util.List;
public record ParsedStatementDTO(
RawStatement statement,
List<RawTransactionValueGroup> groups,
List<RawTransactionValue> values
)
{ }

@@ -11,12 +11,17 @@ spring.datasource.username=${DATASOURCE_USER}
spring.datasource.password=${DATASOURCE_PASSWORD}
spring.datasource.driver-class-name=org.postgresql.Driver
spring.kafka.bootstrap-servers=${KAFKA_SERVERS}
spring.jpa.properties.hibernate.jdbc.batch_size=10
spring.jpa.properties.hibernate.order_inserts=true
spring.jpa.generate-ddl=false
spring.jpa.show-sql=true
spring.jpa.hibernate.ddl-auto=validate
spring.flyway.table=statements_schema_history
spring.flyway.baseline-version=0.9
spring.flyway.baseline-on-migrate=true
# Security
jwt.issuer-url=${AUTHENTIK_ISSUER_URL}

@@ -0,0 +1 @@
ALTER TABLE transactions.transaction_mapping ADD COLUMN timestamp_pattern VARCHAR(255) NULL;

@@ -0,0 +1,177 @@
package dev.mvvasilev.statements.service;
import dev.mvvasilev.common.enums.RawTransactionValueType;
import dev.mvvasilev.statements.entity.RawStatement;
import dev.mvvasilev.statements.entity.RawTransactionValue;
import dev.mvvasilev.statements.entity.RawTransactionValueGroup;
import dev.mvvasilev.statements.persistence.RawStatementRepository;
import dev.mvvasilev.statements.persistence.RawTransactionValueGroupRepository;
import dev.mvvasilev.statements.persistence.RawTransactionValueRepository;
import org.apache.poi.ss.usermodel.WorkbookFactory;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.util.List;
import java.util.stream.LongStream;
class StatementParserServiceTest {
private static final String XLS_TEST_PATH = "src/test/resources/xls-test.xls";
private static final String XLSX_TEST_PATH = "src/test/resources/xlsx-test.xlsx";
private static final RawStatementRepository rawStatementRepository = Mockito.mock(RawStatementRepository.class);
private static final RawTransactionValueGroupRepository rawTransactionValueGroupRepository = Mockito.mock(RawTransactionValueGroupRepository.class);
private static final RawTransactionValueRepository rawTransactionValueRepository = Mockito.mock(RawTransactionValueRepository.class);
private StatementParserService service;
@BeforeAll
static void beforeAll() {
Mockito.when(rawStatementRepository.saveAndFlush(Mockito.any(RawStatement.class))).thenAnswer((input) -> {
((RawStatement) input.getArguments()[0]).setId(1L);
return input.getArguments()[0];
});
Mockito.when(rawTransactionValueGroupRepository.saveAllAndFlush(Mockito.anyList())).thenAnswer((input) -> {
var inputList = (List<RawTransactionValueGroup>) input.getArguments()[0];
var range = LongStream.range(0, inputList.size()).iterator();
return inputList.stream().peek(r -> r.setId(range.next())).toList();
});
Mockito.when(rawTransactionValueRepository.saveAllAndFlush(Mockito.anyList())).thenAnswer((input) -> {
var inputList = (List<RawTransactionValue>) input.getArguments()[0];
var range = LongStream.range(0, inputList.size()).iterator();
return inputList.stream().peek(r -> r.setId(range.next())).toList();
});
}
@BeforeEach
void setUp() {
service = new StatementParserService(
rawStatementRepository,
rawTransactionValueGroupRepository,
rawTransactionValueRepository
);
}
@Test
void parseSheet() throws IOException {
var xlsWorkbook = WorkbookFactory.create(Files.newInputStream(Path.of(XLS_TEST_PATH)));
var xlsxWorkbook = WorkbookFactory.create(Files.newInputStream(Path.of(XLSX_TEST_PATH)));
var xlsResult = service.parseSheet(
xlsWorkbook.getSheetAt(0),
0,
"xls-test.xls"
);
var xlsxResult = service.parseSheet(
xlsxWorkbook.getSheetAt(0),
0,
"xlsx-test.xlsx"
);
Assertions.assertEquals("xls-test.xls", xlsResult.statement().getName());
Assertions.assertEquals(4, xlsResult.groups().size());
Assertions.assertEquals("xlsx-test.xlsx", xlsxResult.statement().getName());
Assertions.assertEquals(4, xlsxResult.groups().size());
// === XLS Value Groups ===
Assertions.assertEquals(RawTransactionValueType.STRING, xlsResult.groups().getFirst().getType());
Assertions.assertEquals("Column A", xlsResult.groups().getFirst().getName());
Assertions.assertEquals(xlsResult.statement().getId(), xlsResult.groups().getFirst().getStatementId());
Assertions.assertEquals(RawTransactionValueType.NUMERIC, xlsResult.groups().get(1).getType());
Assertions.assertEquals("Column B", xlsResult.groups().get(1).getName());
Assertions.assertEquals(xlsResult.statement().getId(), xlsResult.groups().get(1).getStatementId());
Assertions.assertEquals(RawTransactionValueType.TIMESTAMP, xlsResult.groups().get(2).getType());
Assertions.assertEquals("Column C", xlsResult.groups().get(2).getName());
Assertions.assertEquals(xlsResult.statement().getId(), xlsResult.groups().get(2).getStatementId());
Assertions.assertEquals(RawTransactionValueType.BOOLEAN, xlsResult.groups().get(3).getType());
Assertions.assertEquals("Column D", xlsResult.groups().get(3).getName());
Assertions.assertEquals(xlsResult.statement().getId(), xlsResult.groups().get(3).getStatementId());
// === XLSX Value Groups ===
Assertions.assertEquals(RawTransactionValueType.STRING, xlsxResult.groups().getFirst().getType());
Assertions.assertEquals("Column A", xlsxResult.groups().getFirst().getName());
Assertions.assertEquals(xlsxResult.statement().getId(), xlsxResult.groups().getFirst().getStatementId());
Assertions.assertEquals(RawTransactionValueType.NUMERIC, xlsxResult.groups().get(1).getType());
Assertions.assertEquals("Column B", xlsxResult.groups().get(1).getName());
Assertions.assertEquals(xlsxResult.statement().getId(), xlsxResult.groups().get(1).getStatementId());
Assertions.assertEquals(RawTransactionValueType.TIMESTAMP, xlsxResult.groups().get(2).getType());
Assertions.assertEquals("Column C", xlsxResult.groups().get(2).getName());
Assertions.assertEquals(xlsxResult.statement().getId(), xlsxResult.groups().get(2).getStatementId());
Assertions.assertEquals(RawTransactionValueType.BOOLEAN, xlsxResult.groups().get(3).getType());
Assertions.assertEquals("Column D", xlsxResult.groups().get(3).getName());
Assertions.assertEquals(xlsxResult.statement().getId(), xlsxResult.groups().get(3).getStatementId());
// === XLS Values ===
Assertions.assertEquals(xlsResult.groups().getFirst().getId(), xlsResult.values().getFirst().getGroupId());
Assertions.assertEquals(1, xlsResult.values().getFirst().getRowIndex());
Assertions.assertEquals("Text a", xlsResult.values().getFirst().getStringValue());
Assertions.assertEquals(xlsResult.groups().get(1).getId(), xlsResult.values().get(6).getGroupId());
Assertions.assertEquals(1, xlsResult.values().get(6).getRowIndex());
Assertions.assertEquals(0.12, xlsResult.values().get(6).getNumericValue());
Assertions.assertEquals(xlsResult.groups().get(2).getId(), xlsResult.values().get(12).getGroupId());
Assertions.assertEquals(1, xlsResult.values().get(12).getRowIndex());
Assertions.assertEquals(LocalDateTime.of(LocalDate.of(1990, 1, 1), LocalTime.of(0, 0, 0)), xlsResult.values().get(12).getTimestampValue());
Assertions.assertEquals(xlsResult.groups().get(3).getId(), xlsResult.values().get(18).getGroupId());
Assertions.assertEquals(1, xlsResult.values().get(18).getRowIndex());
Assertions.assertEquals(true, xlsResult.values().get(18).getBooleanValue());
// === XLSX Values ===
Assertions.assertEquals(xlsxResult.groups().getFirst().getId(), xlsxResult.values().getFirst().getGroupId());
Assertions.assertEquals(1, xlsxResult.values().getFirst().getRowIndex());
Assertions.assertEquals("Text a", xlsxResult.values().getFirst().getStringValue());
Assertions.assertEquals(xlsxResult.groups().get(1).getId(), xlsxResult.values().get(6).getGroupId());
Assertions.assertEquals(1, xlsxResult.values().get(6).getRowIndex());
Assertions.assertEquals(0.12, xlsxResult.values().get(6).getNumericValue());
Assertions.assertEquals(xlsxResult.groups().get(2).getId(), xlsxResult.values().get(12).getGroupId());
Assertions.assertEquals(1, xlsxResult.values().get(12).getRowIndex());
Assertions.assertEquals(LocalDateTime.of(LocalDate.of(1990, 1, 1), LocalTime.of(0, 0, 0)), xlsxResult.values().get(12).getTimestampValue());
Assertions.assertEquals(xlsxResult.groups().get(3).getId(), xlsxResult.values().get(18).getGroupId());
Assertions.assertEquals(1, xlsxResult.values().get(18).getRowIndex());
Assertions.assertEquals(true, xlsxResult.values().get(18).getBooleanValue());
}
}

Binary file not shown.

Binary file not shown.

@@ -0,0 +1,7 @@
FROM eclipse-temurin:21-jdk-alpine
COPY ./build/libs/pefi-widgets-api-0.0.1-SNAPSHOT.jar app.jar
EXPOSE 8081
ENTRYPOINT exec java $JAVA_OPTS -jar /app.jar $ARGS

@@ -17,6 +17,9 @@ spring.jpa.properties.hibernate.order_inserts=true
spring.jpa.generate-ddl=false
spring.jpa.show-sql=true
spring.jpa.hibernate.ddl-auto=validate
spring.flyway.table=widgets_schema_history
spring.flyway.baseline-version=0.9
spring.flyway.baseline-on-migrate=true
# Security
jwt.issuer-url=${AUTHENTIK_ISSUER_URL}

@@ -1,8 +1,8 @@
rootProject.name = 'pefi'
include 'pefi-common'
include 'pefi-frontend'
include 'pefi-api-gateway'
include 'pefi-common'
include 'pefi-core-api'
include 'pefi-statements-api'
include 'pefi-widgets-api'