<aside> 💡
This document is a guide to configuring the ELK stack and implementing the integrated search feature.
</aside>
<aside> 💡
The following items need to be prepared in advance.
</aside>
docker.elastic.co/elasticsearch/elasticsearch:8.7.0
docker.elastic.co/kibana/kibana:8.7.0
docker.elastic.co/logstash/logstash:8.7.0
docker.elastic.co/package-registry/distribution:8.7.0
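If the images above are not already available locally, they can be pulled in advance, for example:
# Pull the required Docker images
$ docker pull docker.elastic.co/elasticsearch/elasticsearch:8.7.0
$ docker pull docker.elastic.co/kibana/kibana:8.7.0
$ docker pull docker.elastic.co/logstash/logstash:8.7.0
$ docker pull docker.elastic.co/package-registry/distribution:8.7.0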
# Create directories
$ mkdir -p ~/docker/elasticsearch/logstash/jdbc
$ mkdir ~/docker/elasticsearch/logstash/pipeline
$ mkdir ~/docker/elasticsearch/analysis
$ mkdir ~/docker/elasticsearch/elastic-data
# Move the JDBC driver file
$ mv {downloaded JDBC driver file} ~/docker/elasticsearch/logstash/jdbc/postgresql.jar
# Move the analysis plugin file
$ mv {downloaded Nori analysis plugin file} ~/docker/elasticsearch/analysis/nori.zip
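For reference, the two files above can be downloaded roughly as follows. The URLs and version numbers are examples only (the Nori plugin version must match the Elasticsearch version, 8.7.0 here), so verify them against the official PostgreSQL JDBC and Elasticsearch plugin distribution pages.
# Example downloads (verify URLs and versions before use)
$ curl -L -o postgresql.jar https://repo1.maven.org/maven2/org/postgresql/postgresql/42.6.0/postgresql-42.6.0.jar
$ curl -L -o nori.zip https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-nori/analysis-nori-8.7.0.zip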
# Edit the last-run-time tracking file
# (Logstash stores the most recently processed update_time here; seeding it with the epoch value below forces a full initial import)
$ vi ~/docker/elasticsearch/logstash/sql_last_value.yml
=====================================================================================
--- 1970-01-01 00:00:00.000000+0000
# Edit the data-ingestion pipeline configuration file
$ vi ~/docker/elasticsearch/logstash/pipeline/logstash-postgresql.conf
=====================================================================================
input {
jdbc {
jdbc_validate_connection => true
jdbc_driver_library => "/usr/share/logstash/logstash-core/lib/jars/postgresql.jar"
jdbc_driver_class => "org.postgresql.Driver"
jdbc_connection_string => "jdbc:postgresql://${DB_HOST}:${DB_PORT}/${DB_NAME}"
jdbc_user => "${DB_USERNAME}"
jdbc_password => "${DB_PASSWORD}"
jdbc_paging_enabled => true
jdbc_page_size => 500000
lowercase_column_names => false
use_column_value => true
tracking_column => "update_time"
tracking_column_type => "timestamp"
schedule => "0 */5 * * * *"
last_run_metadata_path => "/usr/share/logstash/last_metadata/sql_last_value.yml"
statement => "SELECT menu_id, menu_name, menu_type_code, article_id, cd_nm AS article_type_name, article_type_code, article_title, article_content, thumbnail_image_binary, update_time, delete_yn
FROM (
SELECT
KB.bbs_id AS menu_id,
KB.bbs_nm AS menu_name,
CASE
WHEN KBD.pstg_txt_no IS NOT NULL THEN 'BBS-01'
WHEN KA.alrt_id IS NOT NULL THEN 'BBS-02'
WHEN KF.faq_id IS NOT NULL THEN 'BBS-03'
END AS menu_type_code,
COALESCE(KBD.pstg_txt_no, KA.alrt_id, KF.faq_id) AS article_id,
COALESCE(KBD.pstg_txt_type_cd, KA.alrt_se_cd, KF.qust_clsf_se_cd) AS article_type_code,
COALESCE(KBD.pstg_txt_ttl_nm, KA.alrt_ttl_nm, KF.faq_ttl_nm) AS article_title,
COALESCE(KBD.pstg_txt_cn, KA.alrt_cn, KF.faq_cn) AS article_content,
CASE
WHEN KBD.pstg_txt_no IS NOT NULL THEN KBD.rprs_img_cn
WHEN KA.alrt_id IS NOT NULL THEN KA.rprs_img_cn
ELSE NULL
END AS thumbnail_image_binary,
COALESCE(KBD.last_mdfcn_dt, KA.last_mdfcn_dt, KF.last_mdfcn_dt) AS update_time,
COALESCE(KBD.del_yn, KA.del_yn, KF.del_yn) AS delete_yn
FROM kn_bbs KB
LEFT OUTER JOIN kn_bbs_dtls KBD ON KB.bbs_id = KBD.bbs_id
LEFT OUTER JOIN kn_alrt KA ON KB.bbs_id = KA.bbs_id
LEFT OUTER JOIN kn_faq KF ON KB.bbs_id = KF.bbs_id
WHERE
KBD.last_mdfcn_dt > :sql_last_value
OR KA.last_mdfcn_dt > :sql_last_value
OR KF.last_mdfcn_dt > :sql_last_value
) BBS
LEFT OUTER JOIN kn_dtl_cd KDC ON ((BBS.menu_type_code = 'BBS-01' AND KDC.group_cd = 'KN001') OR (BBS.menu_type_code = 'BBS-02' AND KDC.group_cd = 'KN002') OR (BBS.menu_type_code = 'BBS-03' AND KDC.group_cd = 'KN003')) AND BBS.article_type_code = KDC.meta_cd
UNION ALL
SELECT 99 AS menu_id, '데이터분석' AS menu_name, 'BBS-04' AS menu_type_code, anls_sn AS article_id, NULL AS article_type_name, NULL AS article_type_code, anls_nm AS article_title, anls_cn AS article_content, NULL AS thumbnail_image_binary, last_mdfcn_dt AS update_time, del_yn AS delete_yn
FROM kn_data_anls_schdl
WHERE last_mdfcn_dt > :sql_last_value
ORDER BY update_time ASC"
}
}
filter {
mutate {
remove_field => ["@version", "@timestamp"]
}
}
output {
elasticsearch {
hosts => ["http://${SERVER_HOST}:9200"]
user => "elastic"
password => "${ELASTIC_PASSWORD}"
index => "article"
document_id => "%{[menu_id]}-%{[article_id]}"
}
stdout {
codec => rubydebug
}
}
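The `${DB_HOST}`, `${DB_PORT}`, `${DB_NAME}`, `${DB_USERNAME}`, `${DB_PASSWORD}`, `${SERVER_HOST}`, and `${ELASTIC_PASSWORD}` placeholders are resolved from environment variables when Logstash starts, so they must be provided to the Logstash container. Below is a minimal docker run sketch for illustration only; the container name and values are assumptions, while the host paths follow the directories created above.
# Illustrative only: supply the pipeline's environment variables and mount the prepared files
$ docker run -d --name logstash \
  -e DB_HOST=<db-host> -e DB_PORT=5432 -e DB_NAME=<db-name> \
  -e DB_USERNAME=<db-user> -e DB_PASSWORD=<db-password> \
  -e SERVER_HOST=<elasticsearch-host> -e ELASTIC_PASSWORD=<elastic-password> \
  -v ~/docker/elasticsearch/logstash/pipeline:/usr/share/logstash/pipeline \
  -v ~/docker/elasticsearch/logstash/jdbc/postgresql.jar:/usr/share/logstash/logstash-core/lib/jars/postgresql.jar \
  -v ~/docker/elasticsearch/logstash/sql_last_value.yml:/usr/share/logstash/last_metadata/sql_last_value.yml \
  docker.elastic.co/logstash/logstash:8.7.0
# Edit the Elasticsearch initialization script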
$ vi ~/docker/elasticsearch/elastic-init.sh
=====================================================================================
#!/bin/bash
echo "Waiting for Elasticsearch..."
until curl -s http://localhost:9200 | grep "missing authentication credentials"; do sleep 1; done
echo "Creating index..."
until curl -X PUT 'http://localhost:9200/article' -u "elastic:${ELASTIC_PASSWORD}" -H 'Content-Type: application/json' -d '{
"settings": {
"analysis": {
"analyzer": {
"custom_nori_analyzer": {
"type": "custom",
"tokenizer": "nori_tokenizer",
"filter": ["lowercase", "stop"],
"char_filter": ["html_strip"]
}
}
}
},
"mappings": {
"properties": {
"menu_id": {
"type": "keyword"
},
"menu_name": {
"type": "keyword"
},
"menu_type_code": {
"type": "keyword"
},
"article_id": {
"type": "keyword"
},
"article_type_name": {
"type": "keyword"
},
"article_type_code": {
"type": "keyword"
},
"article_title": {
"type": "text",
"analyzer": "custom_nori_analyzer"
},
"article_content": {
"type": "text",
"analyzer": "custom_nori_analyzer"
},
"thumbnail_image_binary": {
"type": "text",
"index": false
},
"delete_yn": {
"type": "keyword"
}
}
}
}'; do sleep 1; done
echo "Creating search template..."
until curl -X PUT 'http://localhost:9200/_scripts/type_search_template' -u "elastic:${ELASTIC_PASSWORD}" -H 'Content-Type: application/json' -d '{
"script": {
"lang": "mustache",
"source": {
"query": {
"bool": {
"filter": [
{
"term": {
"delete_yn": "N"
}
},
{
"term": {
"menu_type_code": "{{menu_type_code}}"
}
}
],
"should": [
{
"match": {
"article_title": {
"query": "{{search_term}}",
"boost": 1.5
}
}
},
{
"match": {
"article_content": {
"query": "{{search_term}}",
"boost": 1
}
}
}
],
"minimum_should_match": 1
}
},
"highlight": {
"fields": {
"article_title": {}
}
},
"from": "{{from}}",
"size": "{{size}}"
}
}
}'; do sleep 1; done
until curl -X PUT 'http://localhost:9200/_scripts/group_search_template' -u "elastic:${ELASTIC_PASSWORD}" -H 'Content-Type: application/json' -d '{
"script": {
"lang": "mustache",
"source": {
"size": 0,
"query": {
"bool": {
"filter": [
{
"term": {
"delete_yn": "N"
}
}
],
"should": [
{
"match": {
"article_title": {
"query": "{{search_term}}",
"boost": 1.5
}
}
},
{
"match": {
"article_content": {
"query": "{{search_term}}",
"boost": 1
}
}
}
],
"minimum_should_match": 1
}
},
"aggs": {
"menu_aggregation": {
"terms": {
"field": "menu_type_code",
"size": 4
},
"aggs": {
"documents": {
"top_hits": {
"size": 5,
"highlight": {
"fields": {
"article_title": {}
}
}
}
}
}
}
}
}
}
}'; do sleep 1; done
echo "Done...!!!"
$ vi ~/docker/elasticsearch/elasticsearch-Dockerfile
=====================================================================================
FROM docker.elastic.co/elasticsearch/elasticsearch:8.7.0
COPY ./analysis/nori.zip /usr/share/elasticsearch/nori.zip
COPY ./elastic-init.sh /usr/share/elasticsearch/init.sh
RUN /usr/share/elasticsearch/bin/elasticsearch-plugin install file:///usr/share/elasticsearch/nori.zip
ENTRYPOINT ["sh", "-c", "/usr/share/elasticsearch/init.sh & /usr/local/bin/docker-entrypoint.sh"]
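The custom Elasticsearch image can then be built from the prepared directory; the image tag below is an arbitrary example. Note that elastic-init.sh must be executable when copied in (if it is not, a `RUN chmod +x /usr/share/elasticsearch/init.sh` step may be added before the ENTRYPOINT).
# Build the custom Elasticsearch image (tag name is an example)
$ cd ~/docker/elasticsearch
$ docker build -t custom-elasticsearch:8.7.0 -f elasticsearch-Dockerfile .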