Merge pull request #111 from 2023-Team-Joon-CheckIt/BE/fix/#107
[fix/#107] Switch Elasticsearch data indexing to Logstash → resolves the book Id issue
gmlrude authored Oct 29, 2023
2 parents f3846ae + b2d0370 commit 8070f25
Showing 9 changed files with 115 additions and 37 deletions.
6 changes: 4 additions & 2 deletions .gitignore
@@ -40,5 +40,7 @@ out/
gradle.properties
.env
data
elasticsearch
mongodb
elk/elasticsearch
mongodb
config/config.toml
elk/logstash/pipeline/logstash.conf
76 changes: 51 additions & 25 deletions docker-compose.yml
@@ -99,41 +99,67 @@ services:
soft: -1
hard: -1
volumes:
- ./elasticsearch/data:/usr/share/elasticsearch/data
- ./elk/elasticsearch/data:/usr/share/elasticsearch/data
networks:
- default_bridge

kibana:
restart: always
image: docker.elastic.co/kibana/kibana:8.8.1
expose:
- 5601
ports:
- 5601:5601
depends_on:
- elasticsearch
environment:
- SERVER_PORT=5601
- SERVER_NAME=kibana.example.org
- ELASTICSEARCH_HOSTS=http://elasticsearch:9200
networks:
- default_bridge
# kibana:
# restart: always
# image: docker.elastic.co/kibana/kibana:8.8.1
# expose:
# - 5601
# ports:
# - 5601:5601
# depends_on:
# - elasticsearch
# environment:
# - SERVER_PORT=5601
# - SERVER_NAME=kibana.example.org
# - ELASTICSEARCH_HOSTS=http://elasticsearch:9200
# networks:
# - default_bridge

monstache:
restart: always
image: rwynn/monstache:rel6
command: -f ./config.toml &
# filebeat:
# container_name: filebeat
# image: docker.elastic.co/beats/filebeat:8.8.1
# volumes:
# - ./filebeat/filebeat.yml:/usr/share/filebeat/filebeat.yml
# depends_on:
# - logstash
# networks:
# - default_bridge

logstash:
container_name: logstash
image: docker.elastic.co/logstash/logstash:8.8.1
build:
context: ./elk
volumes:
- ./config/config.toml:/config.toml
- ./elk/logstash/pipeline/logstash.conf:/usr/share/logstash/pipeline/logstash.conf
ports:
- "5044:5044"
environment:
- "xpack.monitoring.enabled=false"
depends_on:
- elasticsearch
links:
- elasticsearch
ports:
- "8081:8081"
networks:
- default_bridge

# monstache:
# restart: always
# image: rwynn/monstache:rel6
# command: -f ./config.toml &
# volumes:
# - ./config/config.toml:/config.toml
# depends_on:
# - elasticsearch
# links:
# - elasticsearch
# ports:
# - "8081:8081"
# networks:
# - default_bridge

networks:
default_bridge:
ipam:
12 changes: 12 additions & 0 deletions elk/Dockerfile
@@ -0,0 +1,12 @@
FROM docker.elastic.co/logstash/logstash:8.8.1

ENV MYSQL_CONNECTOR_J_VERSION 8.0.23
ENV MYSQL_CONNECTOR_J_URL https://dev.mysql.com/get/Downloads/Connector-J/mysql-connector-java-$MYSQL_CONNECTOR_J_VERSION.tar.gz

# Download and copy the MySQL Connector/J driver
RUN curl -L -O $MYSQL_CONNECTOR_J_URL
RUN tar -xvf mysql-connector-java-$MYSQL_CONNECTOR_J_VERSION.tar.gz
RUN cp mysql-connector-java-$MYSQL_CONNECTOR_J_VERSION/mysql-connector-java-$MYSQL_CONNECTOR_J_VERSION.jar /usr/share/logstash/mysql-connector-java-$MYSQL_CONNECTOR_J_VERSION.jar
RUN rm -rf mysql-connector-java-$MYSQL_CONNECTOR_J_VERSION.tar.gz mysql-connector-java-$MYSQL_CONNECTOR_J_VERSION

CMD ["logstash", "-f", "/usr/share/logstash/pipeline/logstash.conf"]
32 changes: 32 additions & 0 deletions elk/logstash/pipeline/logstash.conf.example
@@ -0,0 +1,32 @@
input {
jdbc {
jdbc_driver_library => "/usr/share/logstash/mysql-connector-java-8.0.23.jar"
jdbc_driver_class => "com.mysql.cj.jdbc.Driver"
jdbc_connection_string => "RDS endpoint"
jdbc_user => "username"
jdbc_password => "password"
jdbc_paging_enabled => true
tracking_column => "unix_ts_in_secs"
use_column_value => true
tracking_column_type => "numeric"
schedule => "*/5 * * * * *"
statement => "SELECT *, UNIX_TIMESTAMP(created_at) AS unix_ts_in_secs FROM books WHERE (UNIX_TIMESTAMP(created_at) > :sql_last_value AND created_at < NOW()) ORDER BY created_at ASC"
last_run_metadata_path => "/usr/share/logstash/.logstash_jdbc_last_run"
}
}
filter {
mutate {
copy => {"book_id" => "[@metadata][_id]"}
copy => {"cover_image_url" => "coverImageUrl"}
copy => {"created_at" => "createdAt"}
remove_field => ["@version", "unix_ts_in_secs",
"cover_image_url", "book_id", "updated_at", "height", "is_deleted", "width", "thickness"]
}
}
output {
elasticsearch {
hosts => ["http://elasticsearch:9200"]
index => "book"
document_id => "%{[@metadata][_id]}"
}
}
BookSearchLikeRes.java
@@ -12,11 +12,12 @@
@AllArgsConstructor(access = AccessLevel.PRIVATE)
@JsonNaming(PropertyNamingStrategies.SnakeCaseStrategy.class)
public class BookSearchLikeRes {
private String title;
private String author;
private String publisher;
private String coverImageUrl;
private int pages;
private String category;
private int like;
private Long id;
private String title;
private String author;
private String publisher;
private String coverImageUrl;
private int pages;
private String category;
private int like;
}
BookSearchRes.java
@@ -12,6 +12,7 @@
@AllArgsConstructor(access = AccessLevel.PRIVATE)
@JsonNaming(PropertyNamingStrategies.SnakeCaseStrategy.class)
public class BookSearchRes {
private Long id;
private String title;
private String author;
private String publisher;
BookDocument.java
@@ -6,14 +6,15 @@
import lombok.Getter;
import lombok.NoArgsConstructor;
import org.springframework.data.elasticsearch.annotations.Document;

import javax.persistence.*;

@Document(indexName = "book")
@Getter
@NoArgsConstructor(access = AccessLevel.PROTECTED)
public class BookDocument extends BaseEntity {
@Id
private String id;
private Long id;
private String title;
private String author;
private String publisher;
@@ -22,7 +23,8 @@ public class BookDocument extends BaseEntity {
private String category;

@Builder
public BookDocument(String title, String author, String publisher, String coverImageUrl, int pages, String category) {
public BookDocument(Long id, String title, String author, String publisher, String coverImageUrl, int pages, String category) {
this.id = id;
this.title = title;
this.author = author;
this.publisher = publisher;
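BookDocument now carries the relational primary key: id changed from an Elasticsearch-assigned String to a Long, and the Lombok builder accepts it. A minimal usage sketch (values are illustrative, not from this PR):

    // Hedged sketch — illustrative values only.
    BookDocument doc = BookDocument.builder()
            .id(42L)                          // the MySQL book_id, reused as the Elasticsearch _id
            .title("Effective Java")
            .author("Joshua Bloch")
            .publisher("Addison-Wesley")
            .coverImageUrl("https://example.com/cover.jpg")
            .pages(416)
            .category("Programming")
            .build();
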
@@ -30,6 +30,7 @@ public BookRes toDto(Book book, int likes, boolean likeStatus) {
}
public BookSearchRes toBookSearchDto(BookDocument book) {
return BookSearchRes.builder()
.id(book.getId())
.title(book.getTitle())
.author(book.getAuthor())
.publisher(book.getPublisher())
@@ -40,6 +41,7 @@ public BookSearchRes toBookSearchDto(BookDocument book) {
}
public BookSearchLikeRes toBookSearchLikeDto(Book book) {
return BookSearchLikeRes.builder()
.id(book.getId())
.title(book.getTitle())
.author(book.getAuthor())
.publisher(book.getPublisher())
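Both converters now copy the identifier into the response DTOs, so search results expose the id a client needs for follow-up requests. A hedged sketch of the effect — bookMapper and bookDocument are placeholder names, and the JSON shape assumes the SnakeCaseStrategy declared on the DTOs:

    // Hypothetical call site; the converter bean's name is not shown in this diff.
    BookSearchRes res = bookMapper.toBookSearchDto(bookDocument);
    // Serialized with SnakeCaseStrategy, the payload would look roughly like:
    // { "id": 42, "title": "Effective Java", "author": "Joshua Bloch", "publisher": "Addison-Wesley", ... }
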
BookSearchRepository.java
@@ -7,7 +7,7 @@

import java.util.List;

public interface BookSearchRepository extends ElasticsearchRepository<BookDocument, String> {
public interface BookSearchRepository extends ElasticsearchRepository<BookDocument, Long> {
List<BookDocument> findByTitleContaining(String title);
Page<BookDocument> findAll(Pageable pageable);
}
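
With the repository keyed by Long, the Elasticsearch document id and the MySQL book_id are interchangeable. A hedged verification sketch, assuming the repository is injected as a Spring bean (titles and ids are illustrative):

    // Not part of the PR — a sketch of re-fetching a search hit by the same Long id MySQL uses.
    public Optional<BookDocument> refetchFirstHit(BookSearchRepository bookSearchRepository) {
        List<BookDocument> hits = bookSearchRepository.findByTitleContaining("Effective");
        if (hits.isEmpty()) {
            return Optional.empty();
        }
        Long bookId = hits.get(0).getId();             // a Long now, matching books.book_id
        return bookSearchRepository.findById(bookId);  // the same id resolves the Elasticsearch document
    }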
