diff --git a/.gitignore b/.gitignore
index d1792017..95f7a8b2 100644
--- a/.gitignore
+++ b/.gitignore
@@ -240,6 +240,7 @@ celerybeat.pid
*.sage.py
# Environments
+seoul.env
.env
.venv
env/
@@ -308,4 +309,9 @@ pyrightconfig.json
.history
.ionide
-# End of https://www.toptal.com/developers/gitignore/api/python,visualstudiocode,intellij
\ No newline at end of file
+# End of https://www.toptal.com/developers/gitignore/api/python,visualstudiocode,intellij
+
+# swyang add
+.autoenv
+test.py
+*.pid
\ No newline at end of file
diff --git a/API_ROUTER/app/__init__.py b/API-SERVICE/ApiList/__init__.py
similarity index 100%
rename from API_ROUTER/app/__init__.py
rename to API-SERVICE/ApiList/__init__.py
diff --git a/API_ROUTER/app/common/__init__.py b/API-SERVICE/ApiList/meta/__init__.py
similarity index 100%
rename from API_ROUTER/app/common/__init__.py
rename to API-SERVICE/ApiList/meta/__init__.py
diff --git a/API-SERVICE/ApiList/meta/addChildCategory.py b/API-SERVICE/ApiList/meta/addChildCategory.py
new file mode 100644
index 00000000..4816ed40
--- /dev/null
+++ b/API-SERVICE/ApiList/meta/addChildCategory.py
@@ -0,0 +1,25 @@
+import uuid
+from typing import Dict
+from ApiService.ApiServiceConfig import config
+from Utils.CommonUtil import connect_db, get_exception_info, convert_data
+from pydantic import BaseModel
+
+
+class addChildCategory(BaseModel):
+ prnts_id: str
+ node_nm: str
+
+
+def api(insert: addChildCategory) -> Dict:
+ query = f"INSERT INTO tb_category (node_nm, prnts_id, node_id)\
+ VALUES ({convert_data(insert.node_nm)},{convert_data(insert.prnts_id)},{convert_data(uuid.uuid4())});"
+
+ try:
+ db = connect_db()
+ db.execute(query)
+ except Exception:
+ except_name = get_exception_info()
+ result = {"result": 0, "errorMessage": except_name}
+ else:
+ result = {"result": 1, "errorMessage": ""}
+ return result
diff --git a/API-SERVICE/ApiList/meta/deleteElsBizMeta.py b/API-SERVICE/ApiList/meta/deleteElsBizMeta.py
new file mode 100644
index 00000000..f3577a2a
--- /dev/null
+++ b/API-SERVICE/ApiList/meta/deleteElsBizMeta.py
@@ -0,0 +1,24 @@
+from typing import Dict
+from pydantic import BaseModel
+from Utils.CommonUtil import get_exception_info
+from ELKSearch.Manager.manager import ElasticSearchManager
+from ApiService.ApiServiceConfig import config
+from ELKSearch.Utils.database_utils import get_config
+
+
+class DeleteData(BaseModel):
+ biz_dataset_id: str
+
+
+def api(input: DeleteData) -> Dict:
+ els_config = get_config(config.root_path, "config.ini")[config.db_type[:-3]]
+ try:
+ es = ElasticSearchManager(**els_config)
+ es.delete("biz_dataset_id", input.biz_dataset_id)
+
+ except Exception:
+ except_name = get_exception_info()
+ result = {"result": 0, "errorMessage": except_name}
+ else:
+ result = {"result": 1, "errorMessage": ""}
+ return result
diff --git a/API-SERVICE/ApiList/meta/emailAthnCnfm.py b/API-SERVICE/ApiList/meta/emailAthnCnfm.py
new file mode 100644
index 00000000..4da46e24
--- /dev/null
+++ b/API-SERVICE/ApiList/meta/emailAthnCnfm.py
@@ -0,0 +1,40 @@
+from typing import Dict
+from fastapi.logger import logger
+from pydantic import BaseModel
+from Utils.CommonUtil import get_exception_info, connect_db, convert_data
+
+
+class EmailAuthFail(Exception):
+ pass
+
+
+class EmailAthnCnfm(BaseModel):
+ email: str
+ athn_no: str
+
+
+def api(email_confirm: EmailAthnCnfm) -> Dict:
+ try:
+ db = connect_db()
+ email_info, _ = db.select(
+ f"SELECT * FROM tb_email_athn_info WHERE email={convert_data(email_confirm.email)}"
+ )
+
+ if email_info[0]["athn_no"] == email_confirm.athn_no:
+ time_zone = "Asia/Seoul"
+ db.execute(f"SET TIMEZONE={convert_data(time_zone)}")
+ db.execute(
+ f"UPDATE tb_email_athn_info \
+ SET athn_yn='Y', athn_date=NOW() WHERE email={convert_data(email_confirm.email)};"
+ )
+ else:
+ raise EmailAuthFail
+ logger.info("Successfully Auth Confirm.")
+ except Exception:
+ except_name = get_exception_info()
+ result = {"result": 0, "errorMessage": except_name}
+ else:
+ result = {"result": 1, "errorMessage": ""}
+
+ return result
+
diff --git a/API-SERVICE/ApiList/meta/emailAthnPass.py b/API-SERVICE/ApiList/meta/emailAthnPass.py
new file mode 100644
index 00000000..a03f7620
--- /dev/null
+++ b/API-SERVICE/ApiList/meta/emailAthnPass.py
@@ -0,0 +1,48 @@
+from typing import Dict
+from fastapi.logger import logger
+from pydantic import BaseModel
+from Utils.CommonUtil import get_exception_info, connect_db, convert_data
+from ApiService.ApiServiceConfig import config
+
+
+class EmailAuthFail(Exception):
+ pass
+
+
+class EmailAthnPass(BaseModel):
+ email: str
+ athn_no: str
+ new_password: str
+
+
+def api(email_athn_pass: EmailAthnPass) -> Dict:
+ user_id = email_athn_pass.email
+ new_password = email_athn_pass.new_password
+ user_info_table = config.user_info["table"]
+ try:
+ db = connect_db()
+ email_info, _ = db.select(
+ f"SELECT * FROM tb_email_athn_info WHERE email={convert_data(email_athn_pass.email)}"
+ )
+
+ if email_info[0]["athn_no"] == email_athn_pass.athn_no:
+ time_zone = "Asia/Seoul"
+ db.execute(f"SET TIMEZONE={convert_data(time_zone)}")
+ if email_info[0]["athn_yn"] == "Y":
+ db.execute(
+ f'UPDATE {user_info_table} SET {config.user_info["password_column"]} = {convert_data(config.pwd_context.hash(new_password))} \
+ WHERE {config.user_info["id_column"]} = {convert_data(user_id)};'
+ )
+ else:
+ raise EmailAuthFail
+ else:
+ raise EmailAuthFail
+ logger.info("Successfully Auth Password.")
+ except Exception:
+ except_name = get_exception_info()
+ result = {"result": 0, "errorMessage": except_name}
+ else:
+ result = {"result": 1, "errorMessage": ""}
+
+ return result
+
diff --git a/API-SERVICE/ApiList/meta/emailAthnSend.py b/API-SERVICE/ApiList/meta/emailAthnSend.py
new file mode 100644
index 00000000..03d7f41f
--- /dev/null
+++ b/API-SERVICE/ApiList/meta/emailAthnSend.py
@@ -0,0 +1,82 @@
+import random
+import string
+from typing import Dict
+
+from fastapi.logger import logger
+from pydantic import BaseModel
+
+from ApiService.ApiServiceConfig import config
+from Utils import insert_mail_history
+from Utils.CommonUtil import (
+ get_exception_info,
+ connect_db,
+ convert_data,
+ send_template_mail,
+)
+
+
+class EmailNotAuth(Exception):
+ pass
+
+
+class EmailNotExist(Exception):
+ pass
+
+
+class EmailAthnSend(BaseModel):
+ email: str
+ msg_type: str # register or password
+
+
+def make_auth_no():
+ string_pool = string.ascii_letters + string.digits
+ auth_no = ""
+ for _ in range(int(config.email_auth["auth_no_len"])):
+ auth_no += random.choice(string_pool)
+ return auth_no
+
+
+def make_email_auth_query(email, auth_no, exist_mail):
+ if exist_mail:
+ query = f"UPDATE tb_email_athn_info \
+ SET athn_no={convert_data(auth_no)}, send_date=NOW() WHERE email={convert_data(email)};"
+ else:
+ query = f"INSERT INTO tb_email_athn_info (email, athn_no, athn_yn, send_date) \
+ VALUES ({convert_data(email)}, {convert_data(auth_no)}, 'N', NOW());"
+ return query
+
+
+def api(email_auth: EmailAthnSend) -> Dict:
+ try:
+ auth_no = make_auth_no()
+ db = connect_db()
+ exist_mail, _ = db.select(f"SELECT * FROM tb_email_athn_info WHERE email={convert_data(email_auth.email)}")
+
+ if email_auth.msg_type == "password":
+ if len(exist_mail) == 0:
+ raise EmailNotExist
+ if exist_mail[0]["athn_yn"] == "N":
+ raise EmailNotAuth
+
+ send_template_mail(auth_no, email_auth.email, email_auth.msg_type)
+ insert_mail_history(
+ rcv_adr=email_auth.email,
+ title=config.email_auth[f"subject_{email_auth.msg_type}"],
+ contents=auth_no,
+ tmplt_cd=email_auth.msg_type,
+ )
+
+ time_zone = "Asia/Seoul"
+ db.execute(f"SET TIMEZONE={convert_data(time_zone)}")
+ query = make_email_auth_query(email_auth.email, auth_no, exist_mail)
+ db.execute(query)
+
+ logger.info("Successfully sent the mail.")
+ except Exception:
+ except_name = get_exception_info()
+ result = {"result": 0, "errorMessage": except_name}
+ else:
+ result = {"result": 1, "errorMessage": ""}
+
+ return result
+
diff --git a/API-SERVICE/ApiList/meta/getCategoryList.py b/API-SERVICE/ApiList/meta/getCategoryList.py
new file mode 100644
index 00000000..76716c82
--- /dev/null
+++ b/API-SERVICE/ApiList/meta/getCategoryList.py
@@ -0,0 +1,17 @@
+from typing import Dict
+from ApiService.ApiServiceConfig import config
+from Utils.CommonUtil import connect_db, get_exception_info
+
+
+def api() -> Dict:
+ category_query = "SELECT * FROM tb_category ORDER BY prnts_id, node_id;"
+
+ try:
+ db = connect_db()
+ category_list = db.select(category_query)[0]
+ except Exception:
+ except_name = get_exception_info()
+ result = {"result": 0, "errorMessage": except_name}
+ else:
+ result = {"result": 1, "errorMessage": "", "data": category_list}
+ return result
diff --git a/API-SERVICE/ApiList/meta/getCategoryNmCount.py b/API-SERVICE/ApiList/meta/getCategoryNmCount.py
new file mode 100644
index 00000000..655b0371
--- /dev/null
+++ b/API-SERVICE/ApiList/meta/getCategoryNmCount.py
@@ -0,0 +1,29 @@
+from typing import Dict
+from ELKSearch.Manager.manager import ElasticSearchManager
+from ELKSearch.Utils.elasticsearch_utils import make_query
+from Utils.CommonUtil import get_exception_info
+from ELKSearch.Utils.database_utils import get_config
+from ApiService.ApiServiceConfig import config
+
+
+def api(nms) -> Dict:
+ data_dict = {}
+ key = "re_ctgry"
+ els_config = get_config(config.root_path, "config.ini")[config.db_type[:-3]]
+ try:
+ ctgry_nm_list = nms.split(",")
+ es = ElasticSearchManager(**els_config)
+ for c_id in ctgry_nm_list:
+ c_v = c_id.replace(" ","")
+ cnt_query = make_query("query", "match_phrase", {key: c_v})
+ cnt = es.conn.count(index=es.index, body=cnt_query)["count"]
+ data_dict[c_id.replace(" ", "_")] = cnt
+
+ except Exception:
+ except_name = get_exception_info()
+ result = {"result": 0, "errorMessage": except_name}
+    else:
+        result = {"result": 1, "errorMessage": "", "data": data_dict}
+
+
+ return result
diff --git a/API-SERVICE/ApiList/meta/getCategoryTree.py b/API-SERVICE/ApiList/meta/getCategoryTree.py
new file mode 100644
index 00000000..e60aa7cf
--- /dev/null
+++ b/API-SERVICE/ApiList/meta/getCategoryTree.py
@@ -0,0 +1,41 @@
+from typing import Dict
+from ApiService.ApiServiceConfig import config
+from Utils.CommonUtil import connect_db, make_res_msg, get_exception_info
+
+
+def api() -> Dict:
+ get_category_list = "SELECT * FROM tb_category;"
+
+ try:
+ db = connect_db()
+ category_list, _ = db.select(get_category_list)
+
+ node_dict = {}
+ category_tree = {}
+ for category in category_list:
+ node_dict[category["node_id"]] = category["node_nm"]
+ category_tree[category["node_nm"]] = []
+
+ for category in category_list:
+ if node_dict.get(category["prnts_id"]):
+ parent_name = node_dict[category["prnts_id"]]
+ category_tree[parent_name].append(category["node_nm"])
+
+ result_category = {}
+ for category in category_tree["ROOT"]:
+ if category == "ROOT":
+ continue
+ result_category[category] = None
+
+ for main_category, sub_category in category_tree.items():
+ if sub_category:
+ if main_category == "ROOT":
+ continue
+ result_category[main_category] = sub_category
+ except Exception:
+ except_name = get_exception_info()
+ result = {"result": 0, "errorMessage": except_name}
+ else:
+ result = make_res_msg(1, "", result_category, [])
+
+ return result
diff --git a/API-SERVICE/ApiList/meta/getElsBizMetaList.py b/API-SERVICE/ApiList/meta/getElsBizMetaList.py
new file mode 100644
index 00000000..2add20c7
--- /dev/null
+++ b/API-SERVICE/ApiList/meta/getElsBizMetaList.py
@@ -0,0 +1,77 @@
+from typing import Dict
+from datetime import datetime
+from ELKSearch.Manager.manager import ElasticSearchManager
+from ELKSearch.Utils.model import InputModel
+from ELKSearch.Utils.elasticsearch_utils import make_query, base_search_query
+from ELKSearch.Utils.database_utils import get_config
+from Utils.CommonUtil import get_exception_info
+from Utils.SearchUtil import search_count
+from ApiService.ApiServiceConfig import config
+
+
+def extra_filter(option_list):
+ els_katech_option = ["ctgry", "data_shap", "data_prv_desk"]
+ for item in option_list:
+ for col in els_katech_option:
+ if col in item.field:
+ item.field.append(f"re_{col}")
+ index = item.field.index(col)
+ del item.field[index]
+ item.keywords = [v.replace(" ", "") for v in item.keywords]
+
+ tmp = []
+ for field in item.field:
+ tmp.append(field)
+ if field in ["data_nm", "data_desc"]:
+ col = field + ".korean_analyzer"
+ tmp.append(col)
+ item.field = tmp
+
+ return option_list
+
+
+def api(input: InputModel) -> Dict:
+ from_ = input.from_ - 1
+ els_config = get_config(config.root_path, "config.ini")[config.db_type[:-3]]
+ try:
+ if input.chk and len(input.searchOption):
+ with open(
+ f"{config.root_path}/log/{config.category}/{datetime.today().strftime('%Y%m%d')}_search.log",
+ "a",
+ ) as fp:
+ for search in input.searchOption:
+ fp.write(f"{str(search.keywords)}\n")
+
+ es = ElasticSearchManager(page=from_, size=input.size, index=input.index, **els_config)
+ es.set_sort(input.sortOption)
+
+ ############ search option ############
+ action = "query"
+ sub_action = "must"
+ input.searchOption = extra_filter(input.searchOption)
+ query_dict = base_search_query(action, sub_action, input.searchOption)
+
+ # ############ filter option ############
+ sub_action = "filter"
+ input.filterOption = extra_filter(input.filterOption)
+ item_dict = base_search_query(action, sub_action, input.filterOption)
+ query_dict.update(item_dict)
+ search_query = make_query(action, "bool", query_dict)
+ es.body.update(search_query)
+
+ # ############ sort option ############
+ sort_list = [{item.field: item.order} for item in input.sortOption]
+ es.set_sort(sort_list)
+ search_data = es.search(input.resultField)
+
+ data_dict = search_count(es, item_dict, query_dict)
+
+ except Exception:
+ except_name = get_exception_info()
+ result = {"result": 0, "errorMessage": except_name}
+ else:
+ search_list = [data["_source"] for data in search_data["hits"]["hits"]]
+ data_dict["searchList"] = search_list
+ result = {"result": 1, "errorMessage": "", "data": data_dict}
+
+ return result
diff --git a/API-SERVICE/ApiList/meta/getElsCkanList.py b/API-SERVICE/ApiList/meta/getElsCkanList.py
new file mode 100644
index 00000000..225b21d3
--- /dev/null
+++ b/API-SERVICE/ApiList/meta/getElsCkanList.py
@@ -0,0 +1,55 @@
+from typing import Dict
+from datetime import datetime
+from ELKSearch.Manager.manager import ElasticSearchManager
+from ELKSearch.Utils.model import InputModel
+from ELKSearch.Utils.elasticsearch_utils import make_query, base_search_query
+from ELKSearch.Utils.database_utils import get_config
+from Utils.CommonUtil import get_exception_info
+from Utils.SearchUtil import search_count, ckan_query
+from ApiService.ApiServiceConfig import config
+
+
+def api(input: InputModel) -> Dict:
+ """
+ 2023-10-20 변경사항
+ ckan_data 사용X
+ 해외데이터 외부데이터는 v_biz_meta_oversea_els 통합
+ :param search_option:
+ :return:
+ """
+ from_ = input.from_ - 1
+ els_config = get_config(config.root_path, "config.ini")[config.db_type[:-3]]
+ index = "ckan_data"
+ try:
+ if input.chk and len(input.searchOption):
+ with open(
+ f"{config.root_path}/log/{config.category}/{datetime.today().strftime('%Y%m%d')}_search.log",
+ "a",
+ ) as fp:
+ for search in input.searchOption:
+ fp.write(f"{str(search.keywords)}\n")
+
+ es = ElasticSearchManager(page=from_, size=input.size, index=index, **els_config)
+ es.set_sort(input.sortOption)
+
+ ############ search option ############
+ query_dict = ckan_query(input.searchOption)
+ search_query = make_query("query","bool", query_dict)
+ es.body.update(search_query)
+
+ # ############ sort option ############
+ sort_list = [{item.field: item.order} for item in input.sortOption]
+ es.set_sort(sort_list)
+ search_data = es.search(input.resultField)
+
+ data_dict = search_count(es, {'filter': []}, query_dict)
+
+ except Exception:
+ except_name = get_exception_info()
+ result = {"result": 0, "errorMessage": except_name}
+ else:
+ search_list = [data["_source"] for data in search_data["hits"]["hits"]]
+ data_dict["searchList"] = search_list
+ result = {"result": 1, "errorMessage": "", "data": data_dict}
+
+ return result
diff --git a/API-SERVICE/ApiList/meta/getPrefixBizMeta.py b/API-SERVICE/ApiList/meta/getPrefixBizMeta.py
new file mode 100644
index 00000000..b23917fa
--- /dev/null
+++ b/API-SERVICE/ApiList/meta/getPrefixBizMeta.py
@@ -0,0 +1,58 @@
+from typing import Dict
+from fastapi.logger import logger
+from pydantic import BaseModel
+from ELKSearch.Manager.manager import ElasticSearchManager
+from Utils.CommonUtil import get_exception_info
+from ELKSearch.Utils.database_utils import get_config
+from ApiService.ApiServiceConfig import config
+
+
+class Prefix(BaseModel):
+ index: str
+ size: int
+ fields: list
+ query: str
+
+
+def api(input: Prefix) -> Dict:
+ """
+ Auto Complete data_nm
+ DB의 Like 검색과 유사함
+ :param keyword: type dict, ex) {"data_name" : "테"}
+ :return:
+ """
+ if not len(input.fields):
+ input.fields = ["data_nm"]
+ els_config = get_config(config.root_path,"config.ini")[config.db_type[:-3]]
+ try:
+ els_config["index"] = ["biz_meta","v_biz_meta_oversea_els"]
+ es = ElasticSearchManager(**els_config)
+ es.size = input.size
+ input.query = f"(*{input.query}*)"
+ del input.index
+ del input.size
+ search_query = {"query_string": input.dict()}
+ logger.info(search_query)
+
+ body = {
+ "query": {
+ "bool": {
+ "must": [search_query]
+ }
+ }
+ }
+ es.body = body
+ logger.info(es.body)
+ prefix_data = es.search(input.fields)
+ logger.info(prefix_data)
+
+ if not len(prefix_data):
+ return {"result": 1,"data": []}
+ except Exception:
+ except_name = get_exception_info()
+ result = {"result": 0, "errorMessage": except_name}
+ else:
+ prefix_data = [data["_source"]["data_nm"] for data in prefix_data["hits"]["hits"]]
+ result = {"result": 1, "errorMessage": "", "data": prefix_data}
+
+ return result
diff --git a/API-SERVICE/ApiList/meta/insertElsBizMeta.py b/API-SERVICE/ApiList/meta/insertElsBizMeta.py
new file mode 100644
index 00000000..5f3a5716
--- /dev/null
+++ b/API-SERVICE/ApiList/meta/insertElsBizMeta.py
@@ -0,0 +1,55 @@
+import uuid
+from typing import Dict
+from Utils.CommonUtil import get_exception_info
+from pydantic import BaseModel
+from ELKSearch.Manager.manager import ElasticSearchManager
+
+
+class BizMeta(BaseModel):
+ biz_dataset_id: str
+ src_url: str
+ kywrd: str
+ ctgry: str
+ data_updt_cyc: str
+ adm_dep: str
+ admr_nm: str
+ file_read_authority: str
+ retv_num: str
+ data_desc: str
+ data_prv_desk: str
+ license: str
+ lang: str
+ adm_dep_hp: str
+ data_nm: str
+ updt_nxt_dt: str
+ updt_dt: str
+ reg_dt: str
+ reg_user: str
+ amd_user: str
+ reg_date: str
+ amd_date: str
+ data_shap: str
+ data_srttn: str
+ data_limit: str
+ othr_use_notes: str
+ data_eng_nm: str
+ downl_num: str
+ attnt_data_num: str
+ share_num: str
+ contents: str
+
+
+def api(biz_meta_data: BizMeta) -> Dict:
+ uid = uuid.uuid4()
+ try:
+ es = ElasticSearchManager()
+ biz_meta_data = biz_meta_data.dict()
+        biz_meta_data["biz_dataset_id"] = str(uid)
+ es.insert(biz_meta_data, biz_meta_data["biz_dataset_id"])
+
+ except Exception:
+ except_name = get_exception_info()
+ result = {"result": 0, "errorMessage": except_name}
+ else:
+ result = {"result": 1, "errorMessage": ""}
+ return result
diff --git a/API-SERVICE/ApiList/meta/metaInsert.py b/API-SERVICE/ApiList/meta/metaInsert.py
new file mode 100644
index 00000000..6eb4a1a6
--- /dev/null
+++ b/API-SERVICE/ApiList/meta/metaInsert.py
@@ -0,0 +1,50 @@
+from typing import Dict
+from ApiService.ApiServiceConfig import config
+from Utils.CommonUtil import connect_db, get_exception_info, convert_data
+import os
+import base64
+
+
+def print_files_in_dir(root_dir, file_name):
+ files = os.listdir(root_dir)
+ print(len(files))
+ for file in files:
+ path = os.path.join(root_dir, file, file_name)
+ print(path)
+
+
+def api() -> Dict:
+ eda_path = "/Users/cbc/Downloads/EDA_FILE"
+ try:
+ db = connect_db()
+ files = os.listdir(eda_path)
+ id_cnt = 0
+ for index, rid in enumerate(files):
+ print(index)
+ path = os.path.join(eda_path, rid, "profile_report_merged.html")
+ with open(path, "rb") as fd:
+ data = fd.read()
+ data_base64 = base64.b64encode(data).decode("ascii")
+ insert_data = f"data:text/html;base64,{data_base64}"
+ print(f"LEN : {len(insert_data)}")
+ # print(insert_data)
+ # query = f'UPDATE meta_temp SET file_data = {convert_data(insert_data)}\
+ # WHERE gimi9_rid = {convert_data(rid)}'
+ select_query = f"select biz_dataset_id from meta_temp where gimi9_rid = {convert_data(rid)}"
+ select_res, _ = db.select(select_query)
+ if select_res:
+ biz_dataset_id = select_res[0]["biz_dataset_id"]
+ query = f"INSERT INTO tb_meta_html (biz_dataset_id, file_data) VALUES ({convert_data(biz_dataset_id)}, {convert_data(insert_data)});"
+ db.execute(query)
+ else:
+ id_cnt += 1
+ print(f"id_cnt : {id_cnt}")
+
+ except Exception:
+ except_name = get_exception_info()
+ result = {"result": 0, "errorMessage": except_name}
+ else:
+
+ result = {"result": 1, "errorMessage": ""}
+
+ return result
diff --git a/API-SERVICE/ApiList/meta/updateCategory.py b/API-SERVICE/ApiList/meta/updateCategory.py
new file mode 100644
index 00000000..73bbb3e2
--- /dev/null
+++ b/API-SERVICE/ApiList/meta/updateCategory.py
@@ -0,0 +1,27 @@
+import uuid
+from ApiService.ApiServiceConfig import config
+from Utils.CommonUtil import connect_db, get_exception_info, convert_data
+from pydantic import BaseModel
+from typing import Dict
+
+
+class UpdateCategory(BaseModel):
+ node_id: str
+ node_nm: str
+
+
+def api(update: UpdateCategory) -> Dict:
+ query = f"UPDATE tb_category\
+ SET prnts_id = {convert_data(uuid.uuid4())},\
+ node_id = {convert_data(update.node_id)},\
+ node_nm = {convert_data(update.node_nm)}\
+ WHERE node_id = {convert_data(update.node_id)};"
+ try:
+ db = connect_db()
+ db.execute(query)
+ except Exception:
+ except_name = get_exception_info()
+ result = {"result": 0, "errorMessage": except_name}
+ else:
+ result = {"result": 1, "errorMessage": ""}
+ return result
diff --git a/API-SERVICE/ApiList/meta/updateElsBizMeta.py b/API-SERVICE/ApiList/meta/updateElsBizMeta.py
new file mode 100644
index 00000000..293cb8f7
--- /dev/null
+++ b/API-SERVICE/ApiList/meta/updateElsBizMeta.py
@@ -0,0 +1,29 @@
+from typing import Dict
+from pydantic import BaseModel
+from Utils.CommonUtil import get_exception_info, connect_db, convert_data
+from ELKSearch.Manager.manager import ElasticSearchManager
+from ELKSearch.Utils.database_utils import get_config
+from ELKSearch.Utils.elasticsearch_utils import data_process
+from ApiService.ApiServiceConfig import config
+
+
+class UpdateData(BaseModel):
+ biz_dataset_id: str
+
+
+def api(input: UpdateData) -> Dict:
+ els_config = get_config(config.root_path,"config.ini")[config.db_type[:-3]]
+ query = f"SELECT * FROM v_biz_meta_info WHERE biz_dataset_id = {convert_data(input.biz_dataset_id)}"
+ try:
+ db = connect_db()
+ es = ElasticSearchManager(**els_config)
+ biz_data = db.select(query)[0][0]
+
+ els_dict = data_process(biz_data)["_source"]
+ es.conn.index(index=es.index,body=els_dict,id=input.biz_dataset_id)
+ except Exception:
+ except_name = get_exception_info()
+ result = {"result": 0, "errorMessage": except_name}
+ else:
+ result = {"result": 1, "errorMessage": ""}
+ return result
diff --git a/API-SERVICE/ApiList/meta/updateElsBizMetaBulk.py b/API-SERVICE/ApiList/meta/updateElsBizMetaBulk.py
new file mode 100644
index 00000000..919f18df
--- /dev/null
+++ b/API-SERVICE/ApiList/meta/updateElsBizMetaBulk.py
@@ -0,0 +1,34 @@
+from typing import Dict
+from elasticsearch import helpers
+from Utils.CommonUtil import get_exception_info, connect_db
+from ELKSearch.Manager.manager import ElasticSearchManager
+from ELKSearch.Utils.database_utils import get_config
+from ELKSearch.Utils.elasticsearch_utils import data_process
+from ApiService.ApiServiceConfig import config
+
+
+def api() -> Dict:
+ """
+ bulk로 업데이트 할 때 timeout이 발생하는 이슈가 있음
+ """
+ els_config = get_config(config.root_path,"config.ini")[config.db_type[:-3]]
+ # bulk_meta_item = list()
+ db_query = f"SELECT * FROM v_biz_meta_info WHERE status = 'D'"
+
+ try:
+ db = connect_db()
+ es = ElasticSearchManager(**els_config)
+
+ meta_wrap_list = db.select(db_query)[0]
+ for meta_wrap in meta_wrap_list:
+ els_dict = data_process(meta_wrap)
+ es.insert(els_dict["_source"],meta_wrap["biz_dataset_id"])
+ # bulk_meta_item.append(els_dict)
+ # helpers.bulk(es.conn, bulk_meta_item, index=es.index)
+
+ except Exception:
+ except_name = get_exception_info()
+ result = {"result": 0, "errorMessage": except_name}
+ else:
+ result = {"result": 1, "errorMessage": ""}
+ return result
diff --git a/API_ROUTER/app/routes/__init__.py b/API-SERVICE/ApiList/sitemng/__init__.py
similarity index 100%
rename from API_ROUTER/app/routes/__init__.py
rename to API-SERVICE/ApiList/sitemng/__init__.py
diff --git a/API-SERVICE/ApiList/sitemng/getCodeInfo.py b/API-SERVICE/ApiList/sitemng/getCodeInfo.py
new file mode 100644
index 00000000..b7d0e961
--- /dev/null
+++ b/API-SERVICE/ApiList/sitemng/getCodeInfo.py
@@ -0,0 +1,32 @@
+from typing import Dict
+from ApiService.ApiServiceConfig import config
+from Utils.CommonUtil import connect_db, get_exception_info, convert_data
+
+
+def api(groupId) -> Dict:
+ get_code_info_query = f"SELECT code_id, code_nm, data_1, data_2 \
+ FROM tb_code_detail \
+ WHERE code_group_id = {convert_data(groupId)};"
+ try:
+ db = connect_db()
+ code_list = db.select(get_code_info_query)
+ except Exception:
+ except_name = get_exception_info()
+ result = {"result": 0, "errorMessage": except_name}
+ else:
+ code_info = []
+ if len(code_list[0]):
+ code_info = [
+ {
+ "code_id": code_detail["code_id"],
+ "code_nm": code_detail["code_nm"],
+ "data_1": code_detail["data_1"],
+ "data_2": code_detail["data_2"],
+ }
+ for code_detail in code_list[0]
+ ]
+
+ body = {"list": code_info}
+ result = {"result": 1, "errorMessage": "", "data": body}
+
+ return result
diff --git a/API-SERVICE/ApiList/sitemng/getCodeList.py b/API-SERVICE/ApiList/sitemng/getCodeList.py
new file mode 100644
index 00000000..74a01ff3
--- /dev/null
+++ b/API-SERVICE/ApiList/sitemng/getCodeList.py
@@ -0,0 +1,52 @@
+from typing import Dict
+from ApiService.ApiServiceConfig import config
+from Utils.CommonUtil import connect_db, get_exception_info, convert_data
+from fastapi.logger import logger
+
+
+def api(perPage: int, curPage: int, gropId: str, keyword: str = "") -> Dict:
+
+ curPage = curPage - 1
+ total_cnt_query = "SELECT count(*) AS cnt FROM tb_code_detail"
+ code_list_query = (
+ "SELECT *, row_number () OVER (ORDER BY {0}) AS rowNo FROM tb_code_detail"
+ )
+
+ try:
+ db = connect_db()
+ common_condition = f" WHERE code_group_id = {convert_data(gropId)}"
+ code_list_query = code_list_query + common_condition
+ total_cnt_query = total_cnt_query + common_condition
+
+ if len(keyword):
+ # keyword 검색 조건 추가
+ order_condition = f"code_nm SIMILAR to '%{keyword}%' DESC"
+            search_condition = f" AND code_nm LIKE '%{keyword}%'"
+
+ code_list_query = code_list_query + search_condition
+ total_cnt_query = total_cnt_query + search_condition
+ code_list_query = code_list_query.format(order_condition)
+ else:
+ order_condition = "reg_date ASC"
+ code_list_query = code_list_query.format(order_condition)
+
+ paging_condition = f" LIMIT {perPage} OFFSET ({perPage} * {curPage})"
+ code_list_query = code_list_query + paging_condition
+
+ code_list = db.select(code_list_query)
+ total_cnt = db.select(total_cnt_query)
+
+ except Exception:
+ except_name = get_exception_info()
+ result = {"result": 0, "errorMessage": except_name}
+ else:
+ code_info = []
+ if len(code_list[0]):
+ code_info = [
+ {"code_id": code_detail["code_id"], "code_nm": code_detail["code_nm"]}
+ for code_detail in code_list[0]
+ ]
+
+ body = {"totalcount": total_cnt[0][0]["cnt"], "list": code_info}
+ result = {"result": 1, "errorMessage": "", "data": body}
+ return result
diff --git a/API-SERVICE/ApiService/ApiService.py b/API-SERVICE/ApiService/ApiService.py
new file mode 100644
index 00000000..a853b89f
--- /dev/null
+++ b/API-SERVICE/ApiService/ApiService.py
@@ -0,0 +1,27 @@
+import importlib.util
+from fastapi.logger import logger
+from fastapi import APIRouter
+from ApiService.ApiServiceConfig import config
+
+
+class ApiService:
+ def __init__(self) -> None:
+ self.router = APIRouter()
+ self.set_route()
+
+ def set_route(self) -> None:
+ for api_name, api_info in config.api_config.items():
+ if config.category == api_info["sub_dir"]:
+ module_path = (
+ f'{config.root_path}/ApiList/{api_info["sub_dir"]}/{api_name}.py'
+ )
+ module_name = "api"
+ spec = importlib.util.spec_from_file_location(module_name, module_path)
+ module = importlib.util.module_from_spec(spec)
+ spec.loader.exec_module(module)
+ self.router.add_api_route(
+ f'{api_info["url"]}',
+ module.api,
+ methods=[api_info["method"]],
+ tags=[f'service [ {api_info["sub_dir"]} ]'],
+ )
diff --git a/API-SERVICE/ApiService/ApiServiceConfig.py b/API-SERVICE/ApiService/ApiServiceConfig.py
new file mode 100644
index 00000000..65a2601c
--- /dev/null
+++ b/API-SERVICE/ApiService/ApiServiceConfig.py
@@ -0,0 +1,31 @@
+from typing import Dict
+from psycopg2 import pool
+from passlib.context import CryptContext
+
+
+class ApiServiceConfig:
+ root_path: str
+
+ category: str
+
+ db_type: str
+ db_info: Dict
+
+ remote_info: Dict
+
+ server_host: str
+ server_port: int
+
+ api_config: Dict
+
+ secret_info: Dict
+ user_info: Dict
+ pwd_context: CryptContext
+ email_auth: Dict
+
+ conn_pool: pool.SimpleConnectionPool
+
+ keycloak_info: Dict
+
+
+config = ApiServiceConfig
diff --git a/API-SERVICE/ApiService/__init__.py b/API-SERVICE/ApiService/__init__.py
new file mode 100644
index 00000000..9213060d
--- /dev/null
+++ b/API-SERVICE/ApiService/__init__.py
@@ -0,0 +1,2 @@
+from .ApiService import *
+from .ApiServiceConfig import *
diff --git a/API-SERVICE/ConnectManager/PostgresManager.py b/API-SERVICE/ConnectManager/PostgresManager.py
new file mode 100644
index 00000000..3c2154b4
--- /dev/null
+++ b/API-SERVICE/ConnectManager/PostgresManager.py
@@ -0,0 +1,54 @@
+import psycopg2
+from typing import List, Dict, Tuple, Any
+from ApiService.ApiServiceConfig import config
+from fastapi.logger import logger
+
+
+class PostgresManager:
+ def __init__(self) -> None:
+ self.conn = self.connect()
+ self.cursor = self.conn.cursor()
+
+ def connect(self):
+ conn = config.conn_pool.getconn()
+
+ logger.info("PostgresManager Connect.")
+ return conn
+
+ def execute(self, sql: str) -> None:
+ self.cursor.execute(sql)
+ self.conn.commit()
+ logger.info(f"PostgresManager Execute Result. ({sql})")
+
+ def multiple_excute(self, sql_list: list) -> None:
+ try:
+ for index, sql in enumerate(sql_list):
+ logger.info(f"PostgresManager Multiple Execute. ({index}. {sql})")
+ self.cursor.execute(sql)
+ self.conn.commit()
+ except (Exception, psycopg2.DatabaseError):
+ self.conn.rollback()
+ raise psycopg2.DatabaseError
+
+ def select(
+ self, sql: str, count: int = None
+ ) -> Tuple[List[Dict[Any, Any]], List[Any]]:
+ self.execute(sql)
+ column_names = [desc[0] for desc in self.cursor.description]
+ if count is None:
+ rows = self.cursor.fetchall()
+ else:
+ rows = self.cursor.fetchmany(count)
+ # logger.info(f'PostgresManager Select Execute. ({sql})')
+
+ result = []
+ for row in rows:
+ result.append(dict(zip(column_names, row)))
+ return result, column_names
+
+ def commit(self):
+ self.conn.commit()
+
+ def __del__(self) -> None:
+ self.cursor.close()
+ config.conn_pool.putconn(self.conn)
diff --git a/API-SERVICE/ConnectManager/__init__.py b/API-SERVICE/ConnectManager/__init__.py
new file mode 100644
index 00000000..b5c0391e
--- /dev/null
+++ b/API-SERVICE/ConnectManager/__init__.py
@@ -0,0 +1 @@
+from .PostgresManager import *
diff --git a/API-SERVICE/ELKSearch/.gitignore b/API-SERVICE/ELKSearch/.gitignore
new file mode 100644
index 00000000..fd1b0921
--- /dev/null
+++ b/API-SERVICE/ELKSearch/.gitignore
@@ -0,0 +1,2 @@
+/.idea/
+*.iml
\ No newline at end of file
diff --git a/API-SERVICE/ELKSearch/Manager/__init__.py b/API-SERVICE/ELKSearch/Manager/__init__.py
new file mode 100644
index 00000000..b6e690fd
--- /dev/null
+++ b/API-SERVICE/ELKSearch/Manager/__init__.py
@@ -0,0 +1 @@
+from . import *
diff --git a/API-SERVICE/ELKSearch/Manager/manager.py b/API-SERVICE/ELKSearch/Manager/manager.py
new file mode 100644
index 00000000..b0cd39ad
--- /dev/null
+++ b/API-SERVICE/ELKSearch/Manager/manager.py
@@ -0,0 +1,79 @@
+from typing import Dict, Any, Union
+from elasticsearch import Elasticsearch
+from ELKSearch.Utils.elasticsearch_utils import make_query
+
+
+class ElasticSearchManager:
+ def __init__(
+ self,
+ host: str = "192.168.101.44",
+ port: str = "39200",
+ page: int = 0,
+ size: int = 10,
+ index: str = "biz_meta",
+ ):
+ """
+        Set up the Elasticsearch connection and default DSL query settings.
+        :param host: Elasticsearch host IP address, default = 192.168.101.44
+        :param port: Elasticsearch port number, default = 39200
+        :param index: Elasticsearch index name, default = biz_meta
+ :param page: page, size * page , elasticsearch default value = 0
+ :param size: 아이템 개수 , elasticsearch default value = 10
+ """
+ self.host = host
+ self.port = port
+ self.size = size
+ self.index = index
+ self.cur_from = size * page
+ self.conn = self.connect()
+ self.body = self.set_default_option()
+
+ def connect(self) -> Elasticsearch:
+ es = Elasticsearch(f"http://{self.host}:{self.port}", timeout=30, max_retries=10, retry_on_timeout=True)
+ return es
+
+ def set_default_option(self) -> Dict[Any, Any]:
+ # 유지 보수를 위해 model 적용 안 함
+ self.body = {
+ "sort": [],
+ }
+ return self.body
+
+ def set_sort(self, sort: list) -> None:
+ self.body["sort"] = sort
+
+ def set_pagination(self, size: int, from_: int) -> None:
+ self.size = size
+ self.cur_from = size * from_
+
+ def search(self, source=...):
+ return self.conn.search(
+ index=self.index,
+ body=self.body,
+ from_=self.cur_from,
+ size=self.size,
+ _source=source,
+ )
+
+ def insert(self, body: dict, doc_id: str) -> None:
+ return self.conn.index(index=self.index, body=body, id=doc_id)
+
+ def update(self, body: dict, doc_id: str):
+ return self.conn.update(index=self.index, id=doc_id, body=body)
+
+ def delete(self, field: str, data: Union[str, list]):
+ """
+ 단수 : { query: { term: _id}}
+ 복수 : { query : { term : []}}
+ :param field: data type str, elasticsearch index _source name
+ :param data: data type str or list
+ """
+ delete_data = {field: data}
+ delete_command = make_query("query", "term", delete_data)
+ return self.conn.delete_by_query(index=self.index, body=delete_command)
+
+ def prefix(self, keyword: dict, source=...):
+ prefix_query = make_query("query", "prefix", keyword)
+ return self.conn.search(
+ index=self.index, body=prefix_query, size=self.size, _source=source
+ )
diff --git a/API-SERVICE/ELKSearch/README.md b/API-SERVICE/ELKSearch/README.md
new file mode 100644
index 00000000..15643c0c
--- /dev/null
+++ b/API-SERVICE/ELKSearch/README.md
@@ -0,0 +1,2 @@
+# pyes
+Python Elasticsearch controller
diff --git a/API-SERVICE/ELKSearch/Utils/__init__.py b/API-SERVICE/ELKSearch/Utils/__init__.py
new file mode 100644
index 00000000..b6e690fd
--- /dev/null
+++ b/API-SERVICE/ELKSearch/Utils/__init__.py
@@ -0,0 +1 @@
+from . import *
diff --git a/API-SERVICE/ELKSearch/Utils/database_utils.py b/API-SERVICE/ELKSearch/Utils/database_utils.py
new file mode 100644
index 00000000..f19188d9
--- /dev/null
+++ b/API-SERVICE/ELKSearch/Utils/database_utils.py
@@ -0,0 +1,105 @@
+import argparse
+import configparser
+from psycopg2 import pool
+from typing import List, Dict, Tuple, Any
+from ELKSearch.Manager.manager import ElasticSearchManager
+
+
+class ElsSearchConfig:
+ root_path: str
+ category: str
+
+ db_type: str
+ db_info: Dict
+
+ els_type: str
+ els_info: Dict
+ check: bool
+
+ conn_pool: pool.SimpleConnectionPool
+ es: ElasticSearchManager
+
+
+config = ElsSearchConfig
+
+
+def get_config(root_path, config_name: str):
+ ano_cfg = {}
+
+ conf = configparser.ConfigParser()
+ config_path = root_path + f"/ELKSearch/conf/{config_name}"
+ conf.read(config_path, encoding="utf-8")
+ for section in conf.sections():
+ ano_cfg[section] = {}
+ for option in conf.options(section):
+ ano_cfg[section][option] = conf.get(section, option)
+
+ return ano_cfg
+
+
+def parser_params() -> Any:
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--category", default="local")
+ parser.add_argument("--db_type", default="local")
+ parser.add_argument("--check", default="True")
+
+ return parser.parse_args()
+
+
+def prepare_config(root_path) -> None:
+ args = parser_params()
+ config.root_path = root_path
+ config.category = args.category
+
+ db_config = get_config(root_path, "db_config.ini")
+ els_config = get_config(root_path, "config.ini")
+
+ config.els_type = args.category
+ config.els_info = els_config[args.category]
+ if config.category != "email":
+ config.es = ElasticSearchManager(**config.els_info)
+ config.check = args.check
+
+ config.db_type = f"{args.db_type}_db"
+ config.db_info = db_config[config.db_type]
+ config.conn_pool = make_connection_pool(config.db_info)
+
+
+def make_connection_pool(db_info):
+ conn_pool = pool.SimpleConnectionPool(
+ 1,
+ 20,
+ user=db_info["user"],
+ password=db_info["password"],
+ host=db_info["host"],
+ port=db_info["port"],
+ database=db_info["database"],
+ options=f'-c search_path={db_info["schema"]}',
+ connect_timeout=10,
+ )
+ return conn_pool
+
+
+def connect_db():
+ conn = config.conn_pool.getconn()
+ return conn
+
+
+def execute(conn, cursor, sql) -> None:
+ cursor.execute(sql)
+ conn.commit()
+
+
+def select(conn, sql: str, count: int = None) -> Tuple[List[Dict[Any, Any]], List[Any]]:
+ cursor = conn.cursor()
+ execute(conn, cursor, sql)
+ column_names = [desc[0] for desc in cursor.description]
+ if count is None:
+ rows = cursor.fetchall()
+ else:
+ rows = cursor.fetchmany(count)
+
+ result = []
+ for row in rows:
+ result.append(dict(zip(column_names, row)))
+ return result, column_names
diff --git a/API-SERVICE/ELKSearch/Utils/elasticsearch_utils.py b/API-SERVICE/ELKSearch/Utils/elasticsearch_utils.py
new file mode 100644
index 00000000..c80381b2
--- /dev/null
+++ b/API-SERVICE/ELKSearch/Utils/elasticsearch_utils.py
@@ -0,0 +1,73 @@
+import re
+from typing import Dict, Any
+from datetime import datetime
+from fastapi.logger import logger
+
+
+def is_space(text: str) -> int:
+ if " " in text:
+ result = 1
+ else:
+ result = 0
+ return result
+
+
+def make_query(operator, field, value) -> Dict[Any, Any]:
+ query = {operator: {field: value}}
+ return query
+
+
+def base_search_query(action: str, sub_action: str, item_list: list) -> Dict:
+ item_dict = {sub_action: []}
+
+ for item in item_list:
+ if len(item.keywords):
+ words = " ".join(item.keywords).strip()
+
+ # field div
+ if 1 < len(item.field):
+ key = "multi_match"
+ detail = {
+ "fields": item.field,
+ "operator": item.operator,
+ "type": "phrase_prefix",
+ }
+ query = make_query(key, action, words)
+ query[key].update(detail)
+ else:
+ key = "match"
+ detail = {action: words, "operator": item.operator}
+ query = make_query(key, item.field[0], detail)
+ # query 추가
+ item_dict[sub_action].append(query)
+ else:
+ continue
+ return item_dict
+
+
+def default_process(els_dict, data):
+ els_dict["_id"] = data["biz_dataset_id"]
+ els_dict["_source"] = data
+ els_dict["_source"]["biz_dataset_id"] = data["biz_dataset_id"]
+ return els_dict
+
+
+def data_process(data):
+ # D-Ocean Project Function
+ els_dict = dict()
+ data["re_ctgry"] = re.sub("[ ]", "", str(data["ctgry"]))
+ data["re_data_shap"] = re.sub("[ ]", "", str(data["data_shap"]))
+ data["re_data_prv_desk"] = re.sub("[ ]", "", str(data["data_prv_desk"]))
+ # test 환경에서 updt_dt가 None값인 경우가 있음
+ if "updt_dt" in data.keys() and data["updt_dt"] and len(data["updt_dt"]) > 24:
+ mic_s = data["updt_dt"].split(".")[-1]
+ if len(data["updt_dt"]) < 27 and len(mic_s) != 6:
+ data["updt_dt"] = f"{data['updt_dt']}0"
+ logger.info(data["updt_dt"])
+ if len(data["updt_dt"]) > 27:
+ data["updt_dt"] = data["updt_dt"][:-3]
+
+ data["updt_dt"] = datetime.strptime(data["updt_dt"][:-3], "%Y-%m-%d %H:%M:%S.%f")
+
+ els_dict = default_process(els_dict, data)
+ return els_dict
diff --git a/API-SERVICE/ELKSearch/Utils/model.py b/API-SERVICE/ELKSearch/Utils/model.py
new file mode 100644
index 00000000..9a79ced4
--- /dev/null
+++ b/API-SERVICE/ELKSearch/Utils/model.py
@@ -0,0 +1,24 @@
+from pydantic import BaseModel, Field
+from typing import List, Union
+
+
+class ConfigOption(BaseModel):
+ field: Union[list, str]
+ keywords: list
+ operator: str
+
+
+class SortOption(BaseModel):
+ field: str
+ order: str
+
+
+class InputModel(BaseModel):
+ chk: bool = False
+ index: str = "biz_meta"
+ from_: int = Field(1, alias="from")
+ size: int = 10
+ resultField: list = []
+ sortOption: List[SortOption] = []
+ searchOption: List[ConfigOption] = []
+ filterOption: List[ConfigOption] = []
diff --git a/API-SERVICE/ELKSearch/__init__.py b/API-SERVICE/ELKSearch/__init__.py
new file mode 100644
index 00000000..781be988
--- /dev/null
+++ b/API-SERVICE/ELKSearch/__init__.py
@@ -0,0 +1,2 @@
+from .Manager import *
+from .Utils import *
diff --git a/API-SERVICE/ELKSearch/conf/bad_word.txt b/API-SERVICE/ELKSearch/conf/bad_word.txt
new file mode 100644
index 00000000..8ff9da46
--- /dev/null
+++ b/API-SERVICE/ELKSearch/conf/bad_word.txt
@@ -0,0 +1,1532 @@
+넌씨눈
+개새끼
+따먹었어
+ㄷㅇㅂ
+시펄
+빠구울
+쓰레기 새끼
+된장녀
+지껄이
+브랄
+십팔넘
+개씁년
+썅놈
+병크
+씨블
+졸좋
+새뀌
+찌랄
+애미랄
+니씨브랄
+죽어버려
+개너미
+zaji
+줘패
+버지뜨더
+쫀 맛
+창넘
+늬미
+개지랄
+니미기
+쇡끼
+닝기리
+ㅈ.ㄴ
+버지물마셔
+자지넣자
+보지자지
+bozi
+인간말종
+존잼
+씌벨
+존똑
+조오웃
+개쓰레기
+씨이붕
+샹년
+대갈
+십8
+똥구녁
+me췬
+쳐쑤셔박어
+슈우벌
+씨새발끼
+개후라들놈
+좆만한놈
+호냥년
+18ㅅㅔ키
+뒤졌
+개똥
+로 꺼.져
+띠불
+도랏
+성폭행
+ㅈ같네
+젖같
+족까
+젖까
+이기야
+씹새
+우미쑤셔
+조온만
+씨섹끼
+미핀놈
+한녀
+미튄
+똥
+씨벨
+싸가지없
+시팔년
+왕버지
+빠가니
+ㅈ1랄
+허졉
+족까내
+게에가튼
+샛기
+띠블넘
+누나강간
+씨바알
+개같
+존나아
+노무노무
+미친구멍
+그1켬
+뻐큐
+개샛기
+뼝신
+좋오웃
+씹창
+씨입새에
+허접
+G랄
+촌씨브라리
+개소리
+보지따먹기
+빨치산
+ㅄ
+시이붕
+보지녀
+허벌자식
+샊히
+씹탱
+슈1발
+너거애비
+좃넘
+조오지
+씨블년
+새키
+사까시
+걸레년
+애애무
+엠븽신
+좃대가리
+병신세리
+십지랄
+쳐-
+애미
+sex해
+ㅎㅃ
+시바앙
+빠가새
+내조지
+병신
+십자슥
+공지
+뒤지길
+18세ㅋㅣ
+똘아이
+shit
+닳은년
+젓떠
+존쎼
+빻은
+눈깔파
+오랄
+졏같
+졸귀
+존나게
+싸물어
+처먹
+벌창
+사까쉬
+십자석
+니뿡
+이새끼
+보라니
+손놈
+뒤져요
+좃까리
+짱개
+후1빨
+ㅅㄲ들
+정액마셔
+에미
+시이풀
+쉬불
+씨뻘
+조개따조?
+ㅂ크
+웅앵
+내버지
+십부랄
+로린
+개에걸래
+유우우방것
+새킈
+니미럴
+꼴랑
+버지쑤셔
+쉰내
+보짓물
+쌍눔
+지1뢰
+애무
+씨비
+쓰바
+시밸
+돌앗구만
+쓰래기같
+개쉐뀌
+돈년
+존트
+쓰발
+새끼라
+새1끼
+불알
+니믜
+존귘
+빠구리
+처먹고
+성괴
+친 ㅅㄲ
+씨입새
+클리토리스
+친 놈
+느금마
+시방색희
+레1친
+바주카자지
+개쩌
+개.웃
+보전깨
+보지벌리
+쌍놈
+좃만이
+빠라
+미치ㄴ
+럼들
+족같내
+존쎄
+ㅅㅐㄲㅣ
+십팔새끼
+띠이발
+아오 ㅅㅂ
+개같이
+꼴보기
+뒤져야
+꺼져요
+거지같은
+색희
+좇
+지뢀
+새끼
+미친~
+은년
+보지뚫어
+씨댕
+ㅈ리
+뒈져
+조온나
+씹덕
+젓물냄새
+망해라
+성교
+버어어지이
+미: 놈
+삼일한
+ㅈㄴ
+똥꾸뇽
+노네들
+가슴주물럭
+개거얼레
+존예
+엿이나
+쎄리
+존내
+좃빠라라
+남미새
+친 년
+뒤질
+귀두
+헐렁보지
+돌앗나
+개독
+좁밥
+난자마셔
+창놈
+꺼지세요
+착짱죽짱
+유방쪼물딱
+달달이
+세키
+보지보지
+유방주물럭
+좃간년
+봉알
+가슴빨아
+보지빨어
+덜은새끼
+십세
+ㅆㅣ바
+미놈
+돈새끼
+시이팔
+딴년
+bitch
+씨부렬
+18num
+로꺼져
+섬숭이
+보지벌려
+잡것
+젖 같
+호모
+후장꽂아
+닥치세
+시벌
+노무현
+애미보지
+애미자지
+ㅅ1발
+애에미
+보지정액
+염뵹
+닥1
+돌은넘
+ㅆㅣㅂㅏ
+설거지론
+쌔리
+엠창
+붕신
+자지구멍
+지뢰
+절라
+좋만
+ㅅ.ㅂ
+퐁퐁남
+쎄끼
+시입세에
+쉬버
+내꺼핧아
+극혐
+18놈
+시이펄
+ㄱㅐㅅㅐㄲl
+에에무
+허벌보지
+보적보
+시부럴
+상폐녀
+쓉새
+십탱구리
+쉬빡
+후우자앙
+조센징
+쉬이이
+혀로보지핧기
+씹쌔
+지1랄
+버지뚫어
+또라인
+니애뷔
+스벌
+개좆
+쌍년
+젓물
+나쁜새끼
+씹할
+시팔넘
+뒤진다
+한 년
+여자따묵기
+자기핧아
+ㅈ같
+사새끼
+지이랄
+덬
+적까
+개붕알
+개자지
+쉬붕
+시-발
+ㅆ1ㄺ
+죵나
+좆년
+개애거얼래
+씨팍
+친노마
+후려
+허덥
+엠-창
+개떡
+가슴핧아
+십셰리
+구씹
+씹자지
+곱창났
+빠네
+디졌
+D쥐고
+씨바
+뚫린입
+조가튼
+걸레보지
+쪽1바리
+병-신
+병딱
+시이불
+따먹자
+ㅌㅓㄹㅐㄱㅣ
+보지털
+막간년
+개씨발
+실프
+좃만한것
+십세리
+미친쉐이
+띠이이벌
+또오라아이
+개늠
+니뽕
+쓰레기새
+보지뜨더
+찍찍이
+씨불알
+쌍쌍보지
+젓까
+뻑유
+싑창
+씨밸
+ㅉ질한
+시팔놈
+취좃
+조오온니
+강간
+미친씨부랄
+유방쪼물럭
+새ㄲㅣ
+잠지물마셔
+빙신
+걔잡지랄
+좀마니
+미:놈
+괘새끼
+돌은새끼
+조개마셔줘
+암캐년
+괴에가튼?
+뚝배기
+색퀴
+좆새끼
+쉬이붕
+허젚
+조개벌려조
+뒷치기
+빠가냐
+운디네
+쪼녜
+자지
+존싫
+쉬박
+병맛
+시새발끼
+쌕스
+쥰트
+조개핧아줘?
+씹미랄
+후라덜
+조까
+시탱
+엠빙신
+어미강간
+시빡
+꼴값
+십탱굴이
+슈ㅣ발
+ㅆㅂ
+친놈
+졸웃
+좆만아
+십새
+걔섀
+호로자
+씨버럼
+어미쑤시자
+쒸8
+18ㅅㅔㅋㅣ
+젓밥
+호로자슥
+좃물
+여어엄
+버따리자지
+늬믜
+펨코
+촌씨브랭이
+이따위
+느그매
+머리텅
+d쥐고
+보지털뽑아
+세엑스
+젖같은
+게지랄놈
+새1키
+뻨큐
+삐걱
+🚬
+개놈
+왕털보지
+썌끼
+입 털
+쿰.척
+시발새끼
+색끼
+년놈
+영자
+늑음
+미시친발
+개걸레
+가슴쪼물딱
+등신
+써글년
+씨뎅
+맛이간년
+쌍넘
+씨입세에
+애에비
+좃도
+버어지
+개넷
+시입세
+좃까
+개젓가튼넘
+미친쇄리
+주글년
+조개보지
+죽여불고
+개후라새끼
+죶
+씹물
+개간
+씹쌔끼
+항문쑤셔
+조개쑤셔줘
+쓰파
+섹쓰
+막대쑤셔줘?
+씨벌년
+개 새끼
+ㅈㅏ위
+띠벌
+쉬밸년
+보지머리박기
+에에미
+존ㄴ나
+퐁퐁녀
+또-라-이
+죤내
+정신나갓
+시이벌
+허버리년
+드응신
+빠아구우리
+쉬팔
+쉬이팔
+jonna
+게이
+시불
+버지벌료
+노알라
+상년
+좆나
+잡년
+따아알따아리
+슈벌
+뇌1텅
+새.끼
+그켬
+졸잼
+맛간년
+보슬아치
+개아기
+보지구녕
+거지같
+빠간가
+트랜스젠더
+대에가리
+글러먹
+첫빠
+빙신쉐이
+게젓
+쓰1레기
+씝창
+시팔
+좆빨아
+닥-쳐
+듣보
+떠라이
+me친
+씨부럴
+ㅅ1ㄲ
+18세키
+시팔새끼
+존니
+십부럴
+잠지뚫어
+ㄱㅅㄲ
+흐젚
+버어지이
+같은 새끼
+씹선
+믜친
+좆까
+씨박색히
+ㅆㅂㄹㅁ
+스ㄹㅜ
+애미잡년
+미친개
+졀리
+싸가지 없
+찌질
+병1신
+썅늠
+항문
+시방쉑희
+개떵
+jaji
+존낙
+난자먹어
+개애걸래
+흐접
+좆같은새끼
+존버
+미치인
+보지핧아줄까
+외1퀴
+슨상님
+보징어
+공지사항
+띠블
+자지빨아
+허벌자지
+쥰나
+보지쥐어짜
+레친
+미친놈
+에무
+자지털
+버어어지
+수셔
+먹.끔
+에라이 퉤
+레기같
+유두빨어
+아아가리
+개씨블
+다꺼져
+쳐받는
+따알따리
+허어벌
+이그니스
+유우까압
+쉬이풀
+대애가리
+꼬추
+자지정개
+개작두년
+쫂
+조오우까튼
+미틴놈
+개씨발넘
+개씁자지
+도라이
+D지고
+버지따먹기
+쉑갸
+자지핧아줘
+쪽본
+조-ㅈ
+쿰척
+조오까튼
+18새끼
+미티넘
+봊
+씹새끼
+개에거얼래
+젼나
+pennis
+쳐발라
+보지핧아줘
+십창녀
+여엄병
+좆까라
+좃마무리
+18ㅅㅐㄲㅣ
+스.루
+옘병
+페니스
+미틴년
+엠플레버
+미틴넘
+자박꼼
+시미발친
+호로잡
+막대쑤셔줘
+자지꽂아
+띠발뇬
+뻑큐
+쉽세
+주둥이
+에라이퉷
+jot같
+여미새
+d져
+고환
+내꺼빨아
+버짓물
+개부달
+걔잡년
+미친색
+창녀버지
+좆도
+졸귘
+지랄
+병닥
+젖탱이
+ㅆ1ㅂ
+좃물냄새
+사까시이
+씨빠빠
+까내리
+정액먹어
+조개넓은년
+엠생
+버지벌려
+섹스하자
+병신셰리
+띠부우울
+씨박색희
+자지뜨더
+젓냄새
+씨이벌
+음경
+개후라년
+뇌 텅
+조옴마니
+염병
+앙기모띠
+개색뀌
+씨팍세끼
+어미따먹자
+기자레기
+자압것
+씹탱이
+씨발
+찌질이
+젖밥
+눈나
+젼낰
+십쉐끼
+젓마무리
+개에가튼
+엿먹어라
+그나물에
+미쳤니
+ㅆㅣ발
+개자식
+ㅆㅣ댕
+찎찎이
+씹자슥
+소음순
+지롤
+시바알
+씨입
+ㅁ친
+개지랄놈
+쉬펄
+씨뷰렬
+니애비
+내미럴
+ㅁㅣ췬
+penis
+김치녀
+ㅅㅡ루
+친년
+ㅂㅊ
+닥2
+빠큐
+보지에자지너
+씨걸
+왕털잠지
+정자핧아
+호로
+돌았네
+띠벨
+졸맛
+띠이벌
+조낸
+ㅆㅂㄻ
+잠짓물마셔
+쌔끼
+개저가튼
+졸멋
+씨벌쉐이
+씨퐁뇬
+개념빠가
+띠빌
+빠굴
+따먹는다
+맘충
+젓만이
+서버
+쉬방
+씌댕
+돌았구만
+시벌탱
+왕털버지
+롬들
+파친
+븅신
+및힌
+그지 같
+존잘
+보지틀래기
+씨빨
+씹년
+개작두넘
+개나대
+뽀지
+쥰니
+보지물
+조개속물
+조개핧아줘
+애미좃물
+드으응신
+부왘
+내자지
+펑글
+유방핧어
+졀라
+잠지털
+후장뚫어
+좀쓰레기
+야dong
+섀키
+앰창
+걸-레
+fuck
+흐졉
+가슴쪼물락
+게세끼
+쓰바새끼
+ㅆㅣ8
+취ㅈ
+씨퐁자지
+곱창나
+자지구녕
+개새기
+ㅆㄺ
+새꺄
+씹세
+졸예
+꼭지
+조온니
+디져라
+띠이버얼
+씨븡
+큰보지
+개잡년
+쓰벌
+망돌
+그지같
+버지냄새
+젓가튼
+18년
+지이라알
+왜저럼
+쉐끼
+존1
+꼴깝
+가슴조물락
+개가튼뇬
+개저엇
+양아치
+조오또
+먹.금
+개넘
+돌으년
+외퀴
+니할애비
+빠가씹새
+괴가튼
+씨팔
+존나
+명존
+이 새끼
+먹끔
+엠뷩신
+조옷만
+쓰댕
+개가튼
+호로자식
+ㅇㅍㅊㅌ
+따아알따리
+보지찌져
+덜떨어
+십녀
+씨이팔
+뒷잇치기
+d지고
+띠이바알
+계새끼
+ㅅ.ㄲ
+잡놈
+더어엉신
+젗같
+씹보지
+개부랄
+조온
+버지털
+자지쓰레기
+yadong
+꼴갑
+wlfkf
+뚫린 입
+씹지랄
+조온마니
+뇌텅
+개고치
+더럽네
+시발년
+nflavor
+왕자지
+띠브울
+좃가튼뇬
+개라슥
+염병할
+뒤치기
+여자ㄸㅏ묵기
+존 나
+니아비
+씹브랄
+기레기
+18nom
+따먹을까
+구1씹
+오크
+배빵
+김대중
+버지썰어
+조우까튼
+개같은년
+색갸
+정자마셔
+화낭년
+발놈
+쥰내
+시박색히
+가슴만져
+뒤져라
+니아범
+보지핧아
+자지핧아
+닝기미
+시발
+조개벌려조?
+개가튼년
+쥐랄
+죠낸
+세끼
+시이발
+졏 같
+쉬팍
+쉬이발
+운영자
+소추
+보지벌리자
+조개마셔줘?
+mi친
+쉬이벌
+개보지년
+쪽바리
+강간한다
+dogbaby
+미쳣네
+ㅂㅅ
+죽여 버리고
+느금
+헤으응
+유방만져
+띠팔
+띠바
+요년
+염-병
+보지핧어
+촌씨브랑이
+굿보지
+도른
+미칭럼
+시1발
+존귀
+씨퐁
+유두핧어
+흉자
+따먹어야지
+대-가-리
+쪼다
+좃보지
+씌발
+뻐규
+좃냄새
+노친네
+씨바라
+미쳤나
+껒여
+미칀
+씹쉐뀌
+허벌년
+믜칀
+쇅끼
+쉬이이이
+씨1발
+시바시바
+쌍보지
+돌앗네
+동생강간
+쉬빨
+pussy
+개마이
+개셈
+개년
+좀물
+머리 텅
+맛없는년
+동성애자
+십탱
+좆
+색키
+새77ㅣ
+씹탱굴이
+보지구멍
+뷰웅신
+쒸발
+정액발사
+쎅쓰
+보지털어
+유방핧아
+정액짜
+가슴빨어
+개젓
+씨벌
+ㅅ발
+ㅅ루
+조깟
+쉽알넘
+짬지
+텐덕
+십팔
+씨8
+니년
+개잡지랄
+보지
+미1친
+사까아시
+씨팍새끼
+닥전
+보쥐
+젓대가리
+쪼다새끼
+ㅂㄹ
+세엑쓰
+씨이발
+씨펄
+게부럴
+병신씨발
+보픈카
+씹팔넘
+미친구녕
+쯰질
+허좁
+미-친
+친구년
+쬰잘
+쬲
+띠부울
+씨븡새끼
+뼈큐
+닥쳐라
+좆만한새끼
+뇌-텅텅
+좃빠구리
+후라덜넘
+보지찢어
+씨댕이
+썅
+십세이
+미치누
+레기네
+존좋
+개걸래
+벌창같은년
+쪼까튼
+별창
+쒸댕
+조개쑤셔줘?
+좆물
+찌1질
+종나
+거시기
+좋만한것
+빻았
+섹끼
+유방
+jazi
+지-랄
+먹1금
+싹스
+자지빨어
+시바
+눈새
+씨발년
+지럴
+줬같은
+친ㅅㄲ
+씨ㅂㅏ
+노옴
+싸개
+좆먹어
+sibal
+따먹어
+니미
+가슴핧어
+좆밥
+조오올라
+씹창녀
+젓같내
+조녜
+쉑쓰
+씌팔
+ㅅㄲ네
+로 꺼져
+쓰브랄쉽세
+mi쳤
+졸싫
+씹못
+쓰벨
+등-신
+펨베
+짱깨
+쌍-판
+🖕
+뇌피셜
+존마니
+좃만아
+쎄엑스
+나빼썅
+와꾸
+십떼끼
+게새끼
+닥후
+시미친발
+ㅆㄹㄱ
+시박쉑히
+좃깟네
+씨발병신
+찝째끼
+시파
+핑끄
+자지핧어
+개보지
+ya동
+호로짜식
+띠이이발
+씨불
+버짓물마셔
+뽄새
+ㅁㅊ
+꺼.지
+디지고
+빠굴이
+슈발
+씹
+대가리
+엿같
+개-새-끼
+boji
+게늠
+졌같은
+좆같은놈
+개후라
+후장뚫어18세키
+띠펄
+십쉐
+엔플레버
+좃빠네
+버지빨어
+조오가튼
+색히
+쉬탱
+머갈
+미친ㅋ
+존.나
+쌕쓰
+개씨발자슥
+붕알
+한년
+凸
+부랄
+섹스해
+에애무
+쳐먹
+닥쳐
+누보햄
+점물
+씨팍넘
+조개따조
+뒤지겠
+좃털
+게저엇
+쓰루
+뽄세
+ㅅㅍ
+죽여뿌고
+ㅅ끼
+ㅉ
+씹버지
+따먹었지
+게자식
+골빈
+써글
+핑프
+씨뱅가리
+쉬발
+또라이
+좃만한쉐이
+쎅스
+ㅅㅋ
+쳐마
+미친넘
+잠지
+새끼야
+똥구뇽
+ㅂㅁㄱ
+쉬이바
+니애미
+후1려
+아닥
+시키가
+유깝
+에에비
+대음순
+찌질한
+븅쉰
+같은새끼
+사까아시이
+보짓물마셔
+김여사
+조또
+항문수셔
+젓나
+시친발미
+씨발롬
+노무
+ㅎㅌㅊ
+씌뎅
+씹뻐럴
+쒸펄
+정액핧아
+앰
+슈레기
+자지빨아줘
+딸달이
+sex
+시팍
+버지구멍
+fuckyou
+ㅇㅒ쁜
+이년
+빠아가
+먹금
+씹부랄
+존1나
+미친 새
+유우방
+화냥년
+걸래년
+빡새끼
+아오 시바
+sex하자
+쉬방새
+씨빡
+쪽발
+딸딸이
+에비
+미친
+떠어라이
+성교해
+저년
+개지랄넘
+죠온나
+여자ㄸㅏ먹기
+호좁
+씹빵구
+방점뱅
+존맛
+처먹을
+시발놈
+빙띤
+자지쑤셔
+지랼
+유방빨아
+좁빠라라
+왕잠지
+섹스
+씨파넘
+띠발
+씨볼탱
+짱꼴라
+자지박어
+창녀
+니아범?
+보지빨아
+싸가지
+주길년
+유발조물락
+tlqkf
+젓가튼쉐이
+창년벼지
+미틴것
+시팍새끼
+시바라지
+ㅈㄹ
+버지핧아
+미췬
+짱골라
+미친년
+애자
+후장
+존웃
+뷰웅시인
+저엊
+쉬벌
+개저씨
+달딸이
+샊기
+쫀귀
+젓같은
+쫀맛
+ㅆㅣ
+성교하자
+골1빈
+벵신
+씹팔
+빠가야로
+글러 먹
+십창
+씨이불
+눈깔 파
+니기미
+뽕알
+후.려
+시빨
+ㅆㅣ팍넘
+십버지
+창년
+오르가즘
+붜지
+빠아아라
+쉬이이이이
+애미씨뱅
+미친새
+저엇
+ㄱㅐㅈㅏ
+미틴
+씨방세
+엑윽
+썅년
+개련
+짱께
+색스
+육갑
+걸레같은년
+떠어라아이
+여자따먹기
+후우장
+창남
+시댕이
+엄창
+18ㅅㅐ끼
+미친새끼
+정신나갔
+씨부랄
+샤발
+죽여버리고
+씨벌탱
+쉬이펄
+시뷰렬
+좇같
+시볼탱
+은새끼
+쉬이불
+나쁜 새끼
+쉽쌔
+개새
+닌기미
+씨입세
+미친쇠리
+돌았나
+런년
+즤랄
+아가리
+내미랄
+빠아구리
+씨가랭넘
+도랐
+씨가랭년
+자위
+입털
+쫓같
+멜리스
+존멋
+보지박어
+좃부랄
+ㅅㅌㅊ
+쌔엑스
+시바류
+허벌
+쉬이방
+썅뇬
+작은보지
+터래기터래기
+뒤이치기
+자지뜯어
+뒤져야지
+애에무
+왕털자지
+쒸팔
+디질
+조올라
+정자먹어
+섹히
+보지물마셔
+버지빨아
+시뷰럴
+느그
+시부울
+쓰뎅
+me틴
+개불랄
+뇬
+개거얼래
+죤나
+풀발
+씨가랭놈
+쉬풀
+씨붕
+zazi
+씹치
+마스터
+좃또
+에라이 퉷
+난자핧아
+ㅅ1ㅂ
+호졉
+빠가십새
+따먹기
+니미랄
+뷩딱
+미친눔
+쉬이빨
+퍄퍄
+꽃휴
+쳐먹고
+뒤지고싶
+걸레핀년
+또오라이
+쫀1
+쑤셔
+씌8
+지 랄
+개씁블
+씨박쉑히
+좃
+ㅆㅣ뎅
+뷰우웅신
+아오시바
+개세
+정병
+씨브럴
+웅엥
+개섹
+보지에자지껴
+자지짤라
+캐럿닷컴
+골 빈
+디-질
+더러운년
+꼬라지
+더엉신
+띠풀
+병1크
+mi틴
+씨퐁보지
+씹귀
+둄마
+뇨온
+버지구녕
+좆만한년
+시방새
+씨퐁넘
+호로새끼
+유두
+조오오조
+세꺄
+깨쌔끼
+씹뽀지
+백보지
+허벌레
+호루자슥
+공알
+씨뷰럴
+새퀴
+보지벌료
+아오ㅅㅂ
+내씨발
+극1혐
+애비
+씹자석
+시부렬
+시녀
+유우깝
+막대핧아줘
+꼴뵈기
+쓰렉
+개색휘
+후빨
+크리토리스
+이프리트
+십때끼
+좆털
+내잠지
+레기다
+개쓰래기
+게가튼
+시붕
+ㅅㅂ
+야동
+씨방새
+뒤져버
+에라이퉤
+졸라
+주둥아리
+미띤
\ No newline at end of file
diff --git a/API-SERVICE/ELKSearch/conf/config.ini b/API-SERVICE/ELKSearch/conf/config.ini
new file mode 100644
index 00000000..2d4f567d
--- /dev/null
+++ b/API-SERVICE/ELKSearch/conf/config.ini
@@ -0,0 +1,23 @@
+# search API config
+[commercial]
+host = 10.10.10.62
+port = 39200
+
+[local]
+host = localhost
+port = 9200
+
+[test]
+host = 192.168.101.44
+port = 39200
+
+# els_update config
+[meta]
+host = 192.168.101.44
+port = 39200
+index = biz_meta
+
+[ckan]
+host = 192.168.101.44
+port = 39200
+index = ckan_data
diff --git a/API-SERVICE/ELKSearch/conf/db_config.ini b/API-SERVICE/ELKSearch/conf/db_config.ini
new file mode 100644
index 00000000..c6beb751
--- /dev/null
+++ b/API-SERVICE/ELKSearch/conf/db_config.ini
@@ -0,0 +1,39 @@
+[local_db]
+host = localhost
+port = 5432
+user = postgres
+password = 0312
+database = dataportal
+schema = meta
+
+[remote_db]
+host = 10.10.20.60
+port = 5432
+user = dpmanager
+password = hello.dp12#$
+database = dataportal
+schema = meta
+
+[commercial_db]
+host = 10.10.10.34
+port = 5432
+user = dpmanager
+password = hello.dp12#$
+database = dataportal
+schema = meta
+
+[test_db]
+host = 192.168.100.126
+port = 25432
+user = dpme
+password = hello.meta12#$
+database = dataportal
+schema = meta
+
+[email_db]
+host = 192.168.100.126
+port = 25432
+user = dpsi
+password = hello.sitemng12#$
+database = dataportal
+schema = sitemng
\ No newline at end of file
diff --git a/API-SERVICE/ELKSearch/conf/mapping.json b/API-SERVICE/ELKSearch/conf/mapping.json
new file mode 100644
index 00000000..ff2c2117
--- /dev/null
+++ b/API-SERVICE/ELKSearch/conf/mapping.json
@@ -0,0 +1,205 @@
+{
+ "settings": {
+ "queries.cache.enabled": "true",
+ "refresh_interval":"10s",
+ "max_shingle_diff": 10,
+ "analysis": {
+ "tokenizer": {
+ "nori_user_dic": {
+ "type": "nori_tokenizer",
+ "decompound_mode": "discard",
+ "user_dictionary": "user_dic.txt"
+ }
+ },
+ "filter": {
+ "nori_pos": {
+ "type": "nori_part_of_speech",
+ "stoptags": [
+ "E", "J", "SC", "SE", "SF", "SP", "SSC", "SSO", "SY", "VCN", "VCP", "VSV", "VX", "XPN", "XSA", "XSN", "XSV"
+ ]
+ },
+ "synonym": {
+ "type": "synonym_graph",
+ "synonyms_path": "synonyms.txt"
+ },
+ "stopwords": {
+ "type": "stop",
+ "stopwords_path": "stopwords.txt"
+ },
+ "shingle_ten": {
+ "type": "shingle",
+ "token_separator": "",
+ "max_shingle_size": 10
+ }
+ },
+ "analyzer": {
+ "korean_analyzer": {
+ "tokenizer": "nori_user_dic",
+ "filter": [
+ "nori_pos", "nori_readingform", "lowercase", "synonym", "stopwords", "remove_duplicates", "shingle_ten"
+ ]
+ }
+ }
+ }
+ },
+ "mappings": {
+ "properties": {
+ "biz_dataset_id": {
+ "type": "keyword"
+ },
+ "data_nm": {
+ "type": "text",
+ "fields": {
+ "korean_analyzer": {
+ "type": "text",
+ "analyzer": "korean_analyzer",
+ "search_analyzer": "standard"
+ }
+ },
+ "fielddata": true
+ },
+ "data_desc": {
+ "type": "text",
+ "fields": {
+ "korean_analyzer": {
+ "type": "text",
+ "analyzer": "korean_analyzer",
+ "search_analyzer": "standard"
+ }
+ }
+ },
+ "ctgry": {
+ "type": "text",
+ "fields": {
+ "korean_analyzer": {
+ "type": "text",
+ "analyzer": "korean_analyzer",
+ "search_analyzer": "standard"
+ }
+ }
+ },
+ "ctgry_id": {
+ "type": "keyword"
+ },
+ "src_url": {
+ "type": "text",
+ "fields": {
+ "korean_analyzer": {
+ "type": "text",
+ "analyzer": "korean_analyzer",
+ "search_analyzer": "standard"
+ }
+ }
+ },
+ "kywrd": {
+ "type": "text",
+ "fields": {
+ "korean_analyzer": {
+ "type": "text",
+ "analyzer": "korean_analyzer",
+ "search_analyzer": "standard"
+ }
+ }
+ },
+ "reg_date": {
+ "type": "date"
+ },
+ "recnt_amd_date": {
+ "type": "date"
+ },
+ "lnk_date": {
+ "type": "date"
+ },
+ "prv_forml": {
+ "type": "text",
+ "fields": {
+ "korean_analyzer": {
+ "type": "text",
+ "analyzer": "korean_analyzer",
+ "search_analyzer": "standard"
+ }
+ }
+ },
+ "data_eng_nm": {
+ "type": "text",
+ "fields": {
+ "korean_analyzer": {
+ "type": "text",
+ "analyzer": "korean_analyzer",
+ "search_analyzer": "standard"
+ }
+ }
+ },
+ "data_type": {
+ "type": "text",
+ "fields": {
+ "korean_analyzer": {
+ "type": "text",
+ "analyzer": "korean_analyzer",
+ "search_analyzer": "standard"
+ }
+ }
+ },
+ "data_clas": {
+ "type": "text",
+ "fields": {
+ "korean_analyzer": {
+ "type": "text",
+ "analyzer": "korean_analyzer",
+ "search_analyzer": "standard"
+ }
+ }
+ },
+ "law_review_ncst_yn": {
+ "type": "keyword"
+ },
+ "secur_review_ncst_yn": {
+ "type": "keyword"
+ },
+ "data_upd_cycl": {
+ "type": "keyword"
+ },
+ "tkcgr": {
+ "type": "keyword"
+ },
+ "tkcg_dept": {
+ "type": "keyword"
+ },
+ "rqtr": {
+ "type": "keyword"
+ },
+ "rqt_dept": {
+ "type": "keyword"
+ },
+ "retv_num": {
+ "type": "integer"
+ },
+ "intrst_data_num": {
+ "type": "integer"
+ },
+ "downl_num": {
+ "type": "integer"
+ },
+ "src_sys": {
+ "type": "text",
+ "fields": {
+ "korean_analyzer": {
+ "type": "text",
+ "analyzer": "korean_analyzer",
+ "search_analyzer": "standard"
+ }
+ }
+ },
+ "file_size": {
+ "type": "text",
+ "fields": {
+ "korean_analyzer": {
+ "type": "text",
+ "analyzer": "korean_analyzer",
+ "search_analyzer": "standard"
+ }
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/API-SERVICE/Utils/CommonUtil.py b/API-SERVICE/Utils/CommonUtil.py
new file mode 100644
index 00000000..2505dac9
--- /dev/null
+++ b/API-SERVICE/Utils/CommonUtil.py
@@ -0,0 +1,294 @@
+import argparse
+import configparser
+import os
+import smtplib
+import sys
+import traceback
+import uuid
+from datetime import datetime, timedelta
+from email.mime.multipart import MIMEMultipart
+from email.mime.text import MIMEText
+from pathlib import Path
+from typing import Any, Optional, Dict
+from Utils.keycloak import KeycloakManager
+
+import jwt
+from fastapi.logger import logger
+from passlib.context import CryptContext
+from psycopg2 import pool
+from pytz import timezone
+
+from ApiService.ApiServiceConfig import config
+from ConnectManager import PostgresManager
+
+
+def insert_mail_history(rcv_adr: str, title: str, contents: str, tmplt_cd: str):
+ db = connect_db()
+ sql = f"""
+ INSERT INTO
+ sitemng.tb_email_send_info (email_id, rcv_adr, title, contents, tmplt_cd, sttus, reg_date)
+ VALUES
+ ('{uuid.uuid4()}', '{rcv_adr}', '{title}', '{contents}', '{tmplt_cd}', 'SEND', '{datetime.now()}');"""
+ db.execute(sql)
+
+
+def send_template_mail(replace_text, receiver_addr, msg_type):
+ html_part = template_html(msg_type, replace_text)
+ send_mail(
+ html_part,
+ subject=config.email_auth[f"subject_{msg_type}"],
+ from_=config.email_auth["login_user"],
+ to_=receiver_addr,
+ )
+
+
+def send_mail(msg, **kwargs):
+ try:
+ host = kwargs.pop("email_server_host", config.email_auth.get("server_addr"))
+ port = kwargs.pop("email_server_port", config.email_auth.get("port"))
+ from_ = kwargs.pop("from_", config.email_auth.get("login_user"))
+ password = kwargs.pop("password", config.email_auth.get("login_pass"))
+
+ message = MIMEMultipart("alternative")
+ message["Subject"] = kwargs.pop("subject", "")
+ message["From"] = from_
+ message["To"] = kwargs.pop("to_", "")
+ message.attach(msg)
+
+ stmp = smtplib.SMTP(host=host, port=port)
+ stmp.ehlo()
+ stmp.starttls()
+ stmp.login(from_, password)
+ stmp.send_message(message)
+ stmp.quit()
+ except Exception as e:
+ raise e
+
+
+def template_html(msg_type, msg):
+ template = {
+ "register": (f"{config.root_path}/conf/common/template/emailAthnSend.html", "AUTH_NO"),
+ "password": (f"{config.root_path}/conf/common/template/pwdEmailAthn.html", "AUTH_NO"),
+ "share": (f"{config.root_path}/conf/common/template/shareEmail.html", "URL"),
+ }
+
+ with open(template[msg_type][0], "r") as fd:
+ html = "\n".join(fd.readlines())
+ html = html.replace(template[msg_type][1], msg)
+
+ return MIMEText(html, "html")
+
+
+def convert_data(data) -> str:
+ data = str(data)
+ if data:
+ if data == "now()" or data == "NOW()":
+ return data
+ if data[0] == "`":
+ return data[1:]
+ return f"'{data.strip()}'"
+
+
+def set_log_path():
+ parser = configparser.ConfigParser()
+ parser.read(f"{config.root_path}/conf/{config.category}/logging.conf", encoding="utf-8")
+
+ parser.set(
+ "handler_rotatingFileHandler",
+ "args",
+ f"('{config.root_path}/log/{config.category}/{config.category}.log', 'a', 20000000, 10)",
+ )
+
+ with open(f"{config.root_path}/conf/{config.category}/logging.conf", "w") as f:
+ parser.write(f)
+
+
+def get_config(config_name: str):
+ ano_cfg = {}
+
+ conf = configparser.ConfigParser()
+ config_path = config.root_path + f"/conf/{config.category}/{config_name}"
+ conf.read(config_path, encoding="utf-8")
+ for section in conf.sections():
+ ano_cfg[section] = {}
+ for option in conf.options(section):
+ ano_cfg[section][option] = conf.get(section, option)
+
+ return ano_cfg
+
+
+def parser_params() -> Any:
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--host", type=str, default="127.0.0.1")
+ parser.add_argument("--port", type=int, default=19000)
+ parser.add_argument("--category", default="meta")
+ parser.add_argument("--db_type", default="test")
+
+ return parser.parse_args()
+
+
+def prepare_config() -> None:
+ args = parser_params()
+ config.root_path = str(Path(os.path.dirname(os.path.abspath(__file__))).parent)
+ config.category = args.category
+ api_router_cfg = get_config("config.ini")
+ config.api_config = get_config("api_config.ini")
+ config.server_host = args.host
+ config.server_port = args.port
+ config.db_type = f"{args.db_type}_db"
+ config.db_info = api_router_cfg[config.db_type]
+ config.conn_pool = make_connection_pool(config.db_info)
+ if config.category == "common":
+ config.secret_info = api_router_cfg["secret_info"]
+ config.user_info = api_router_cfg["user_info"]
+ config.pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
+ config.email_auth = api_router_cfg["email_auth"]
+ config.keycloak_info = api_router_cfg["keycloak_info"]
+ if config.category == "meta":
+ config.user_info = api_router_cfg["user_info"]
+ config.email_auth = api_router_cfg["email_auth"]
+
+
+def get_keycloak_manager():
+ return KeycloakManager(config.keycloak_info["keycloak_url"])
+
+
+async def get_admin_token():
+ res = await get_keycloak_manager().generate_admin_token(
+ username=config.keycloak_info["admin_username"],
+ password=config.keycloak_info["admin_password"],
+ grant_type="password",
+ )
+
+ return res.get("data").get("access_token")
+
+
+def make_connection_pool(db_info):
+ conn_pool = pool.SimpleConnectionPool(
+ 1,
+ 20,
+ user=db_info["user"],
+ password=db_info["password"],
+ host=db_info["host"],
+ port=db_info["port"],
+ database=db_info["database"],
+ options=f'-c search_path={db_info["schema"]}',
+ connect_timeout=10,
+ )
+ return conn_pool
+
+
+def connect_db():
+ db = PostgresManager()
+ return db
+
+
+def save_file_for_reload():
+ with open(__file__, "a") as fd:
+ fd.write(" ")
+
+
+def make_res_msg(result, err_msg, data=None, column_names=None, kor_column_names=None):
+ header_list = []
+ for index, column_name in enumerate(column_names):
+ if kor_column_names:
+ header = {
+ "column_name": column_name,
+ "kor_column_name": kor_column_names[index],
+ }
+ else:
+ header = {"column_name": column_name}
+ header_list.append(header)
+
+ if data is None or column_names is None:
+ res_msg = {"result": result, "errorMessage": err_msg}
+ else:
+ res_msg = {
+ "result": result,
+ "errorMessage": err_msg,
+ "data": {"body": data, "header": header_list},
+ }
+ return res_msg
+
+
+def get_exception_info():
+ ex_type, ex_value, ex_traceback = sys.exc_info()
+ trace_back = traceback.extract_tb(ex_traceback)
+ trace_log = "\n".join([str(trace) for trace in trace_back])
+ logger.error(
+ f"\n- Exception Type : {ex_type}\n- Exception Message : {str(ex_value).strip()}\n- Exception Log : \n{trace_log}"
+ )
+ return ex_type.__name__
+
+
+def convert_error_message(exception_name: str):
+ error_message = None
+ if exception_name == "UniqueViolation":
+ error_message = "UNIQUE_VIOLATION"
+ else:
+ error_message = exception_name
+
+ return error_message
+
+
+##### for user info #####
+class IncorrectUserName(Exception):
+ pass
+
+
+class IncorrectPassword(Exception):
+ pass
+
+
+class LeavedUser(Exception):
+ pass
+
+
+def get_user(user_name: str):
+ db = connect_db()
+ user = db.select(
+ f'SELECT * FROM {config.user_info["table"]} WHERE {config.user_info["id_column"]} = {convert_data(user_name)}'
+ )
+ return user
+
+
+def create_token(data: dict, expires_delta: Optional[timedelta] = None):
+ to_encode = data.copy()
+ if expires_delta:
+ expire = datetime.now(timezone("Asia/Seoul")) + expires_delta
+ else:
+ expire = datetime.now(timezone("Asia/Seoul")) + timedelta(minutes=15)
+
+ logger.info(f"commonToken Expire : {expire}")
+ to_encode.update({"exp": expire})
+
+ encoded_jwt = jwt.encode(
+ to_encode,
+ config.secret_info["secret_key"],
+ algorithm=config.secret_info["algorithm"],
+ )
+ return encoded_jwt
+
+
+def make_token_data(user: Dict) -> Dict:
+ token_data_column = config.secret_info["token_data_column"].split(",")
+ token_data = {column: user[column] for column in token_data_column}
+ return token_data
+
+
+def verify_password(plain_password, hashed_password):
+ return config.pwd_context.verify(plain_password, hashed_password)
+
+
+def authenticate_user(username: str, password: str):
+ user = get_user(username)
+ if not user[0]:
+ raise IncorrectUserName
+
+ user = user[0][0]
+ if user["user_sttus"] == "SCSN":
+ raise LeavedUser("user_sttus :: SCSN}")
+
+ if not verify_password(password, user[config.user_info["password_column"]]):
+ raise IncorrectPassword
+ return user
diff --git a/API-SERVICE/Utils/SearchUtil.py b/API-SERVICE/Utils/SearchUtil.py
new file mode 100644
index 00000000..97e498f7
--- /dev/null
+++ b/API-SERVICE/Utils/SearchUtil.py
@@ -0,0 +1,71 @@
+from ELKSearch.Utils.elasticsearch_utils import make_query
+
+
+def data_srttn_index(item_dict):
+ """
+ 검색 데이터의 카운팅을 위한 함수
+    :param item_dict: dict containing a "filter" list of match clauses
+    :return: index of the "data_srttn" match clause in the filter list, or None if absent
+ """
+ for i, item in enumerate(item_dict["filter"]):
+ if "data_srttn" in item["match"].keys():
+ return i
+ else:
+ return None
+
+
+def search_count(es, item_dict, query_dict):
+ index = "biz_meta,v_biz_meta_oversea_els"
+ data_dict = dict()
+ data_srttn = {
+ # search_keyword: (result_key, result_data)
+ "보유데이터": "hasCount",
+ "연동데이터": "innerCount",
+ "외부데이터": "externalCount",
+ "해외데이터": "overseaCount",
+ "전체": "totalCount",
+ }
+
+ i = data_srttn_index(item_dict)
+
+ for ko_nm, eng_nm in data_srttn.items():
+ if i is None:
+ cnt_query = make_query(
+ "match", "data_srttn", {"operator": "OR", "query": ko_nm}
+ )
+ item_dict["filter"].append(cnt_query)
+ i = -1
+ else:
+ item_dict["filter"][i]["match"]["data_srttn"]["query"] = ko_nm
+
+ if ko_nm == "전체":
+ del item_dict["filter"][i]
+
+ query_dict.update(item_dict)
+ cnt_query = make_query("query", "bool", query_dict)
+ cnt = es.conn.count(index=index, body=cnt_query)["count"]
+ data_dict[eng_nm] = cnt
+
+ return data_dict
+
+
+def ckan_query(search_option) -> dict:
+ """
+ 2023-10-20 변경사항
+ ckan_data 사용X
+ 해외데이터 외부데이터는 v_biz_meta_oversea_els 통합
+ :param search_option:
+ :return:
+ """
+ search_format = "(*{0}*)"
+ query_dict = []
+
+ for query in search_option:
+ keywords = [search_format.format(word) for keyword in query.keywords for word in keyword.split(" ")]
+ if len(keywords) > 1:
+ keywords = f" {query.operator.upper()} ".join(keywords)
+ else:
+ keywords = keywords[0]
+ query_dict.append({"query_string": {"query": keywords,"fields": query.field}})
+
+ return {"must": query_dict}
diff --git a/API-SERVICE/Utils/__init__.py b/API-SERVICE/Utils/__init__.py
new file mode 100644
index 00000000..df673ad5
--- /dev/null
+++ b/API-SERVICE/Utils/__init__.py
@@ -0,0 +1,2 @@
+from .CommonUtil import *
+from .SearchUtil import *
\ No newline at end of file
diff --git a/API-SERVICE/Utils/batch_email.py b/API-SERVICE/Utils/batch_email.py
new file mode 100644
index 00000000..b9f4480e
--- /dev/null
+++ b/API-SERVICE/Utils/batch_email.py
@@ -0,0 +1,46 @@
+from email.mime.text import MIMEText
+from fastapi.logger import logger
+from ApiService.ApiServiceConfig import config
+from Utils.CommonUtil import connect_db
+from Utils import send_mail
+
+
def get_recv_list():
    """Return the e-mail jobs still waiting to be sent.

    The batch runs periodically, sends each pending mail and then flips the
    row status from 'REQ' to 'SEND' (see email_handler).
    """
    pending_query = "SELECT * FROM tb_email_send_info WHERE sttus = 'REQ'"
    connection = connect_db()
    pending_rows, _ = connection.select(pending_query)
    return pending_rows
+
+
def email_handler():
    """Send every pending (sttus = 'REQ') e-mail, then mark it 'SEND'.

    For each row returned by get_recv_list() the HTML template named by
    ``tmplt_cd`` is loaded and its placeholder tokens (URL / TITLE /
    CONTENTS1 / CONTENTS2) are substituted before sending.  A row is only
    marked 'SEND' when delivery raised no exception (best effort per row).
    """
    send_list = get_recv_list()
    logger.info(send_list)
    for email_info in send_list:
        try:
            # Template file is selected by the template code on the row.
            with open(f'{config.root_path}/conf/sitemng/template/{email_info["tmplt_cd"]}Email.html', "r") as fd:
                html = "\n".join(fd.readlines())

            if email_info["tmplt_cd"] == "share":
                subject = "[자동차데이터포털] 자동차데이터포털에서 공유한 데이터입니다."
                html = html.replace("URL", email_info['contents'])
            else:
                if email_info["tmplt_cd"] == "noty":
                    subject = "[자동차데이터포털] 자동차데이터포털에서 보내는 알림 메일입니다."
                else:
                    subject = f"[자동차데이터포털] {email_info['title']} 신청 메일입니다."
                # NOTE(review): assumes contents is "part1|part2"; a row
                # without '|' raises IndexError below — confirm upstream.
                content = email_info["contents"].split("|")
                html = html.replace("TITLE", email_info['title'])
                html = html.replace("CONTENTS1", content[0])
                html = html.replace("CONTENTS2", content[1])

            html_part = MIMEText(html, "html")
            send_mail(html_part, subject=subject, to_=email_info['rcv_adr'])
        except Exception as e:
            # Best-effort batch: record the failure and continue with the
            # next recipient (was a bare print).
            logger.exception(e)
        else:
            # Mark delivered.  BUG FIX: the original concatenation produced
            # "...sttus = 'SEND'WHERE email_id..." with no separator before
            # WHERE — only accepted by some SQL lexers by accident.
            query = "UPDATE tb_email_send_info SET sttus = 'SEND' " \
                    f"WHERE email_id = '{email_info['email_id']}'"
            db = connect_db()
            db.execute(query)
diff --git a/API-SERVICE/Utils/exceptions.py b/API-SERVICE/Utils/exceptions.py
new file mode 100644
index 00000000..98fb70e5
--- /dev/null
+++ b/API-SERVICE/Utils/exceptions.py
@@ -0,0 +1,6 @@
class InvalidUserInfo(Exception):
    """Raised when supplied user credentials/identity fail validation."""

    pass
+
class TokenDoesNotExist(Exception):
    """Raised when a required auth token is missing from the request."""

    pass
diff --git a/API-SERVICE/Utils/keycloak.py b/API-SERVICE/Utils/keycloak.py
new file mode 100644
index 00000000..016a58bd
--- /dev/null
+++ b/API-SERVICE/Utils/keycloak.py
@@ -0,0 +1,245 @@
+from typing import Any, Dict, List
+import logging
+import aiohttp
+import urllib.parse
+
+logger = logging.getLogger()
+
+
class KeycloakManager:
    """Thin async client for the Keycloak OpenID-Connect and Admin REST APIs.

    Implemented as a singleton: every ``KeycloakManager(...)`` call returns
    the same object.  NOTE(review): ``__init__`` still runs on each call, so
    a later instantiation silently rebinds ``base_url`` — confirm intended.
    """

    _instance = None

    def __new__(cls, *args, **kwargs):
        # Singleton construction: create once, then always reuse.
        if not cls._instance:
            cls._instance = super().__new__(cls)
        return cls._instance

    def __init__(self, base_url: str) -> None:
        self.base_url = base_url

    async def _request_to_keycloak(self, api_url, method, headers, **kwargs):
        """Send a form-encoded request and wrap the response.

        Args:
            api_url: absolute Keycloak endpoint URL.
            method: HTTP verb.
            headers: request headers.
            **kwargs: form fields, urlencoded into the request body.

        Returns:
            Dict: {"status_code": int, "data": parsed JSON, or raw bytes
            when the body is not valid JSON (e.g. empty 204 responses)}.
        """
        data = urllib.parse.urlencode(kwargs)
        # SECURITY FIX: the form body may carry passwords / refresh tokens,
        # so it must never be printed (the previous print(data) leaked
        # credentials to stdout).  Log only the non-sensitive target.
        logger.debug("keycloak request: %s %s", method, api_url)
        async with aiohttp.ClientSession() as session:
            async with session.request(url=api_url, method=method, headers=headers, data=data) as response:
                try:
                    ret = await response.json()
                except Exception:
                    ret = await response.read()
                return {"status_code": response.status, "data": ret}

    async def generate_admin_token(self, **kwargs) -> Dict:
        """Issue a token for the admin account (realm ``master``).

        Args:
            username (str): admin account name.
            password (str): admin password.
            grant_type (str): "password" or "refresh_token".

        Returns:
            Dict: status_code/data wrapper from _request_to_keycloak.
        """
        headers = {"Content-Type": "application/x-www-form-urlencoded"}
        return await self._request_to_keycloak(
            api_url=f"{self.base_url}/realms/master/protocol/openid-connect/token",
            client_id="admin-cli",
            method="POST",
            headers=headers,
            **kwargs,
        )

    async def generate_normal_token(self, realm, **kwargs) -> Dict:
        """Issue a token for a regular member of ``realm``.

        Args:
            realm: Keycloak authentication realm.
            grant_type (str): "password" or "refresh_token".
            username (str): account name.
            password (str): password.
            refresh_token (str): refresh token (for grant_type=refresh_token).
            client_id (str): Keycloak client id.
            client_secret (str): secret matching client_id.

        Returns:
            Dict: status_code/data wrapper from _request_to_keycloak.
        """
        headers = {"Content-Type": "application/x-www-form-urlencoded"}
        return await self._request_to_keycloak(
            api_url=f"{self.base_url}/realms/{realm}/protocol/openid-connect/token",
            method="POST",
            headers=headers,
            **kwargs,
        )

    async def token_info(self, realm, **kwargs) -> Dict:
        """Introspect a token (expects token/client_id/client_secret kwargs)."""
        headers = {"Content-Type": "application/x-www-form-urlencoded"}
        return await self._request_to_keycloak(
            api_url=f"{self.base_url}/realms/{realm}/protocol/openid-connect/token/introspect",
            method="POST",
            headers=headers,
            **kwargs,
        )

    async def create_user(self, token, realm, **kwargs):
        """Create a user in ``realm``; kwargs form the JSON user payload."""
        headers = {"Content-Type": "application/json", "Authorization": "bearer " + token}
        # JSON body, not form data, so this bypasses _request_to_keycloak.
        async with aiohttp.ClientSession() as session:
            async with session.request(
                url=f"{self.base_url}/admin/realms/{realm}/users",
                method="POST",
                headers=headers,
                json=kwargs,
            ) as response:
                return {"status_code": response.status, "data": await response.read()}

    async def delete_user(self, token, realm, user_id):
        """Delete user ``user_id`` from ``realm`` (admin token required)."""
        headers = {"Authorization": "bearer " + token}
        return await self._request_to_keycloak(
            api_url=f"{self.base_url}/admin/realms/{realm}/users/{user_id}", method="DELETE", headers=headers
        )

    async def get_user_list(self, token, realm):
        """List the users of ``realm`` (admin token required)."""
        headers = {"Authorization": "bearer " + token}
        return await self._request_to_keycloak(
            api_url=f"{self.base_url}/admin/realms/{realm}/users", method="GET", headers=headers
        )

    async def user_info(self, token, realm):
        """Return the userinfo claims for the bearer of ``token``."""
        headers = {"Authorization": "bearer " + token}
        return await self._request_to_keycloak(
            api_url=f"{self.base_url}/realms/{realm}/protocol/openid-connect/userinfo", method="GET", headers=headers
        )

    async def user_info_detail(self, token, realm, user_id):
        """Return the admin-view representation of user ``user_id``."""
        headers = {"Authorization": "bearer " + token}
        return await self._request_to_keycloak(
            api_url=f"{self.base_url}/admin/realms/{realm}/users/{user_id}", method="GET", headers=headers
        )

    async def alter_user(self, token, realm, user_id, **kwargs):
        """Update user ``user_id``; kwargs form the JSON user payload."""
        # SECURITY FIX: was print(kwargs) — the payload can include
        # credentials; keep it at debug level without the values.
        logger.debug("keycloak alter_user: realm=%s user_id=%s", realm, user_id)
        headers = {"Content-Type": "application/json", "Authorization": "bearer " + token}
        async with aiohttp.ClientSession() as session:
            async with session.request(
                url=f"{self.base_url}/admin/realms/{realm}/users/{user_id}",
                method="PUT",
                headers=headers,
                json=kwargs,
            ) as response:
                return {"status_code": response.status, "data": await response.read()}

    async def check_user_session(self, token, realm, user_id):
        """List the active sessions of user ``user_id``."""
        headers = {"Authorization": "bearer " + token}
        return await self._request_to_keycloak(
            api_url=f"{self.base_url}/admin/realms/{realm}/users/{user_id}/sessions", method="GET", headers=headers
        )

    async def logout(self, realm, **kwargs):
        """End a session (expects refresh_token/client_id/client_secret kwargs)."""
        headers = {"Content-Type": "application/x-www-form-urlencoded"}
        return await self._request_to_keycloak(
            api_url=f"{self.base_url}/realms/{realm}/protocol/openid-connect/logout",
            method="POST",
            headers=headers,
            **kwargs,
        )
+
+
# Ad-hoc manual smoke test against a development Keycloak instance:
# issues admin/user tokens, creates, inspects, updates and deletes a user.
# NOTE(review): host, credentials and client secrets are hardcoded below —
# they belong in environment/config, not in source control.
if __name__ == "__main__":
    import asyncio

    manager = KeycloakManager("http://192.168.101.44:8080")
    d = asyncio.run(manager.generate_admin_token(username="admin", password="zxcv1234!", grant_type="password"))
    print(f"admin_token :: {d}")
    admin_access_token = d.get("data").get("access_token")
    admin_refresh_token = d.get("data").get("refresh_token")
    # Payload for a throwaway test user in the "kadap" realm.
    data = {
        "username": "swyang",
        "firstName": "seokwoo",
        "lastName": "yang",
        "email": "sw@mobigen.com",
        "emailVerified": False,
        "enabled": True,
        "credentials": [{"value": "zxcv1234!"}],
        "attributes": {"phoneNumber": "010-1234-5678", "gender": "male"},
    }
    r = asyncio.run(
        manager.create_user(
            realm="kadap",
            token=admin_access_token,
            **data,
        )
    )
    d = asyncio.run(
        manager.generate_normal_token(
            realm="kadap",
            username="swyang",
            password="zxcv1234!",
            grant_type="password",
            client_id="uyuni",
            client_secret="04esVekOjeJZKLHBkgsCQxpbwda41aKW",
        )
    )
    print(f"normal token :: {d}")
    normal_access_token = d.get("data").get("access_token")
    normal_refresh_token = d.get("data").get("refresh_token")
    r = asyncio.run(
        manager.token_info(
            realm="kadap",
            token=normal_access_token,
            client_id="uyuni",
            client_secret="04esVekOjeJZKLHBkgsCQxpbwda41aKW",
        )
    )
    print(f"token info :: {r}")
    r = asyncio.run(manager.user_info(realm="kadap", token=normal_access_token))
    print(f"user info :: {r}")
    # "sub" claim is the Keycloak user id used by the admin endpoints below.
    user_id = r.get("data").get("sub")
    r = asyncio.run(manager.user_info_detail(token=admin_access_token, realm="kadap", user_id=user_id))
    print(f"detail :: {r}")
    data = {
        "firstName": "seokwoo",
        "lastName": "yang",
        "email": "sw@mobigen.com",
        "emailVerified": False,
        "credentials": [{"value": "zxcv1234!"}],
        "attributes": {"phoneNumber": "010-1111-1234", "gender": "male"},
    }
    r = asyncio.run(manager.alter_user(token=admin_access_token, realm="kadap", user_id=user_id, **data))
    print(f"alter {r}")
    r = asyncio.run(manager.check_user_session(token=admin_access_token, realm="kadap", user_id=user_id))
    print(f"check :: {r}")
    r = asyncio.run(
        manager.logout(
            realm="kadap",
            grant_type="password",
            refresh_token=normal_refresh_token,
            client_id="uyuni",
            client_secret="04esVekOjeJZKLHBkgsCQxpbwda41aKW",
        )
    )
    print(f"logout :: {r}")
    r = asyncio.run(manager.delete_user(token=admin_access_token, realm="kadap", user_id=user_id))
    print(f"delete :: {r}")
    r = asyncio.run(manager.get_user_list(token=admin_access_token, realm="kadap"))
    print(f"list :: {r}")
diff --git a/API-SERVICE/conf/meta/api_config.ini b/API-SERVICE/conf/meta/api_config.ini
new file mode 100644
index 00000000..d588a358
--- /dev/null
+++ b/API-SERVICE/conf/meta/api_config.ini
@@ -0,0 +1,65 @@
+[getCategoryList]
+method = GET
+url = /portal/api/meta/getCategoryList
+sub_dir = meta
+
+[updateCategory]
+method = POST
+url = /portal/api/meta/updateCategory
+sub_dir = meta
+
+[addChildCategory]
+method = POST
+url = /portal/api/meta/addChildCategory
+sub_dir = meta
+
+[getCategoryTree]
+method = GET
+url = /portal/api/meta/getCategoryTree
+sub_dir = meta
+
+[insertElsBizMeta]
+method = POST
+url = /portal/api/meta/insertElsBizMeta
+sub_dir = meta
+
+[getElsBizMetaList]
+method = POST
+url = /portal/api/meta/getElsBizMetaList
+sub_dir = meta
+
+[deleteElsBizMeta]
+method = POST
+url = /portal/api/meta/deleteElsBizMeta
+sub_dir = meta
+
+[getCategoryNmCount]
+method = GET
+url = /portal/api/meta/getCategoryNmCount
+sub_dir = meta
+
+[getPrefixBizMeta]
+method = POST
+url = /portal/api/meta/getPrefixBizMeta
+sub_dir = meta
+
+[metaInsert]
+method = GET
+url = /portal/api/sitemng/metaInsert
+sub_dir = meta
+
+[updateElsBizMeta]
+method = POST
+url = /portal/api/meta/updateElsBizMeta
+sub_dir = meta
+
+[getElsCkanList]
+method = POST
+url = /portal/api/meta/getElsCkanList
+sub_dir = meta
+
+[updateElsBizMetaBulk]
+method = GET
+url = /portal/api/meta/updateElsBizMetaBulk
+sub_dir = meta
+
diff --git a/API-SERVICE/conf/meta/config.ini b/API-SERVICE/conf/meta/config.ini
new file mode 100644
index 00000000..c4c1912e
--- /dev/null
+++ b/API-SERVICE/conf/meta/config.ini
@@ -0,0 +1,37 @@
+[test_db]
+host = 192.168.100.126
+port = 25432
+user = dpmanager
+password = hello.dp12#$
+database = dataportal
+schema = users,meta,sitemng,board,analysis,sysconfig,ckan
+
+[commercial_db]
+host = 10.10.10.34
+port = 5432
+user = dpme
+password = hello.meta12#$
+database = dataportal
+schema = meta,sitemng,users
+
+[secret_info]
+name = user-katech-access-token
+secret = jwt-secrect-b-iris
+
+[user_info]
+table = tb_user_info
+id_column = user_id
+password_column = user_password
+normal_password = user_normal
+user_role = ROLE_ADMIN,ROLE_OPER
+
+# emailAthnSend, emailAthnCnfm
+[email_auth]
+auth_no_len = 10
+subject_register = [자동차데이터포털]회원가입을 위한 인증 메일입니다.
+subject_password = [자동차데이터포털]비밀번호 변경을 위한 인증 메일입니다.
+subject_share = [자동차데이터포털] 자동차데이터포털에서 공유한 데이터입니다.
+server_addr = smtp.office365.com
+port = 587
+login_user = admin@bigdata-car.kr
+login_pass = Pas07054354@katech!
diff --git a/API-SERVICE/conf/meta/logging.conf b/API-SERVICE/conf/meta/logging.conf
new file mode 100644
index 00000000..447dd5b0
--- /dev/null
+++ b/API-SERVICE/conf/meta/logging.conf
@@ -0,0 +1,28 @@
+[loggers]
+keys = root
+
+[logger_root]
+level = INFO
+handlers = console,rotatingFileHandler
+
+[formatters]
+keys = default
+
+[formatter_default]
+format = %(asctime)s %(levelname)s [%(filename)s:%(lineno)d] - %(message)s
+
+[handlers]
+keys = console,rotatingFileHandler
+
+[handler_console]
+class = StreamHandler
+args = (sys.stdout,)
+formatter = default
+level = INFO
+
+[handler_rotatingFileHandler]
+class = handlers.RotatingFileHandler
+formatter = default
+args = ('/home/deep/workspace/cbc/DataPortal/Katech/AP_API_Router/API-SERVICE/log/meta/meta.log', 'a', 20000000, 10)
+level = INFO
+
diff --git a/API-SERVICE/conf/sitemng/api_config.ini b/API-SERVICE/conf/sitemng/api_config.ini
new file mode 100644
index 00000000..69e125b0
--- /dev/null
+++ b/API-SERVICE/conf/sitemng/api_config.ini
@@ -0,0 +1,9 @@
+[getCodeInfo]
+method = GET
+url = /portal/api/sitemng/getCodeInfo
+sub_dir = sitemng
+
+[getCodeList]
+method = GET
+url = /portal/api/sitemng/getCodeList
+sub_dir = sitemng
diff --git a/API-SERVICE/conf/sitemng/config.ini b/API-SERVICE/conf/sitemng/config.ini
new file mode 100644
index 00000000..ec342771
--- /dev/null
+++ b/API-SERVICE/conf/sitemng/config.ini
@@ -0,0 +1,19 @@
+[test_db]
+host = 192.168.100.126
+port = 25432
+user = dpsi
+password = hello.sitemng12#$
+database = dataportal
+schema = sitemng
+
+[commercial_db]
+host = 192.168.54.60
+port = 5432
+user = dpsi
+password = hello.sitemng12#$
+database = dataportal
+schema = sitemng
+
+[secret_info]
+name = user-katech-access-token
+secret = jwt-secrect-b-iris
\ No newline at end of file
diff --git a/API-SERVICE/conf/sitemng/logging.conf b/API-SERVICE/conf/sitemng/logging.conf
new file mode 100644
index 00000000..14ddc74a
--- /dev/null
+++ b/API-SERVICE/conf/sitemng/logging.conf
@@ -0,0 +1,28 @@
+[loggers]
+keys = root
+
+[logger_root]
+level = INFO
+handlers = console,rotatingFileHandler
+
+[formatters]
+keys = default
+
+[formatter_default]
+format = %(asctime)s %(levelname)s [%(filename)s:%(lineno)d] - %(message)s
+
+[handlers]
+keys = console,rotatingFileHandler
+
+[handler_console]
+class = StreamHandler
+args = (sys.stdout,)
+formatter = default
+level = INFO
+
+[handler_rotatingFileHandler]
+class = handlers.RotatingFileHandler
+formatter = default
+args = ('/home/deep/workspace/cbc/DataPortal/Katech/AP_API_Router/API-SERVICE/log/sitemng/sitemng.log', 'a', 20000000, 10)
+level = INFO
+
diff --git a/API-SERVICE/conf/sitemng/template/analysisRequestEmail.html b/API-SERVICE/conf/sitemng/template/analysisRequestEmail.html
new file mode 100644
index 00000000..3bdbde5c
--- /dev/null
+++ b/API-SERVICE/conf/sitemng/template/analysisRequestEmail.html
@@ -0,0 +1,39 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ |
+ 자동차데이터포털 알림 메일
+ |
+
+
+
+
+ CONTENTS1
+
+ [ 요청 데이터 목록 ]
+ CONTENTS2
+
+ |
+
+
+
+
+
+
+
+
+
diff --git a/API-SERVICE/conf/sitemng/template/notyEmail.html b/API-SERVICE/conf/sitemng/template/notyEmail.html
new file mode 100644
index 00000000..bd9b3239
--- /dev/null
+++ b/API-SERVICE/conf/sitemng/template/notyEmail.html
@@ -0,0 +1,39 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ |
+ 자동차데이터포털 알림 메일
+ |
+
+
+ |
+
+ |
+
+
+
+
+
+
+
+
+
diff --git a/API-SERVICE/conf/sitemng/template/shareEmail.html b/API-SERVICE/conf/sitemng/template/shareEmail.html
new file mode 100644
index 00000000..ceff0156
--- /dev/null
+++ b/API-SERVICE/conf/sitemng/template/shareEmail.html
@@ -0,0 +1,37 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ |
+ 자동차데이터포털 데이터 공유 메일
+ |
+
+
+
+
+ 자동차데이터포털에서 공유한 데이터입니다.
+ URL
+
+ |
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/API-SERVICE/conf/sitemng/template/toolApplyEmail.html b/API-SERVICE/conf/sitemng/template/toolApplyEmail.html
new file mode 100644
index 00000000..068bf569
--- /dev/null
+++ b/API-SERVICE/conf/sitemng/template/toolApplyEmail.html
@@ -0,0 +1,37 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ |
+ 자동차데이터포털 TITLE 신청 메일
+ |
+
+
+
+
+ CONTENTS1
+ CONTENTS2
+
+ |
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/API-SERVICE/els_update.py b/API-SERVICE/els_update.py
new file mode 100644
index 00000000..825eaef7
--- /dev/null
+++ b/API-SERVICE/els_update.py
@@ -0,0 +1,74 @@
+import os
+import re
+from pathlib import Path
+from datetime import datetime
+from elasticsearch import helpers
+from ELKSearch.Utils.database_utils import prepare_config, connect_db, select, config
+from ELKSearch.Utils.elasticsearch_utils import data_process, default_process
+
+root_path = str(Path(os.path.dirname(os.path.abspath(__file__))))
+prepare_config(root_path)
+
+
def insert_meta(db, es):
    """Bulk-index rows of v_biz_meta_info into Elasticsearch.

    Only rows with status 'D' are selected.  NOTE(review): the meaning of
    'D' is not visible here — confirm against the view definition.
    When config.check == "True" the run is incremental: only rows
    registered or amended today are indexed.
    """
    bulk_meta_item = list()
    db_query = f"SELECT * FROM v_biz_meta_info WHERE status = 'D'"
    if config.check == "True":
        today = datetime.today().date()
        # Restrict to rows touched today (incremental run).
        condition = f"AND (DATE(amd_date) >= DATE('{today}')" \
                    f"OR DATE(reg_date) >= DATE('{today}'))"
        db_query = db_query + condition

    meta_wrap_list = select(db, db_query)[0]

    try:
        for meta_wrap in meta_wrap_list:
            # data_process() maps a DB row onto the ELS document shape.
            els_dict = data_process(meta_wrap)
            bulk_meta_item.append(els_dict)
        helpers.bulk(es.conn, bulk_meta_item, index=es.index)
    except Exception as e:
        # NOTE(review): failures are only printed — consider logging and
        # a non-zero exit so the batch scheduler can detect them.
        print(e)
+
+
def insert_ckan(db, es):
    """Bulk-index rows of v_biz_meta_ckan into Elasticsearch.

    When config.check == "True" the run is incremental: only rows
    registered or updated today are indexed; otherwise the whole view is
    re-indexed.
    """
    bulk_meta_item = list()
    db_query = "SELECT biz_dataset_id, data_nm, data_desc, notes, reg_date, tags, updt_dt" \
               " FROM v_biz_meta_ckan"

    if config.check == "True":
        today = datetime.today().date()
        # Restrict to rows touched today (incremental run).
        condition = f"WHERE (DATE(updt_dt) >= DATE('{today}')" \
                    f"OR DATE(reg_date) >= DATE('{today}'))"
        db_query = db_query + condition

    ckan_wrap_list = select(db, db_query)[0]
    try:
        for ckan in ckan_wrap_list:
            # default_process() maps a DB row onto the ELS document shape.
            els_dict = default_process(dict(), ckan)
            bulk_meta_item.append(els_dict)
        helpers.bulk(es.conn, bulk_meta_item, index=es.index)
    except Exception as e:
        # NOTE(review): failures are only printed — consider logging and
        # a non-zero exit so the batch scheduler can detect them.
        print(e)
+
+
def main():
    """
    :param
    config dir path: {project_path}/ELKSearch/config
    --category=ckan|meta, elasticsearch config
    --db_type=test|commercial , database config
    --check=True|False, True=today False=All
    :return:
    """
    # NOTE(review): prepare_config(root_path) already ran at import time
    # above; this second call looks redundant — confirm it is idempotent.
    prepare_config(root_path)
    es = config.es
    db = connect_db()

    # --category selects which source view is indexed.
    if config.category == "meta":
        insert_meta(db, es)
    else:
        insert_ckan(db, es)


if __name__ == "__main__":
    main()
diff --git a/API-SERVICE/recommend_word.py b/API-SERVICE/recommend_word.py
new file mode 100644
index 00000000..b3c63fea
--- /dev/null
+++ b/API-SERVICE/recommend_word.py
@@ -0,0 +1,80 @@
+import os
+import re
+import ast
+from pathlib import Path
+from datetime import datetime
+from collections import Counter
+from ELKSearch.Utils.database_utils import (
+ prepare_config,
+ connect_db,
+ select,
+ config,
+ execute,
+)
+
+root_path = str(Path(os.path.dirname(os.path.abspath(__file__))))
+
+
def main():
    """Aggregate today's search-log keywords into tb_recommend_keyword.

    Command-line parameters (shared with els_update.py):
        --db_type: selects the DB section of conf/config.ini or
                   ELKSearch/conf/db_config.ini
        --check: "True" accumulates onto stored counts, "False" starts fresh

    Reads {root}/log/meta/YYYYMMDD_search.log — one Python-literal list of
    search words per line — drops jamo-only typos and filtered words, then
    upserts the top-10 keyword counts.
    """
    prepare_config(root_path)
    db = connect_db()

    # Load today's search-word log (raises FileNotFoundError if the
    # service produced no searches today — unchanged behavior).
    search_file_name = (
        f"{root_path}/log/meta/{datetime.today().date().strftime('%Y%m%d')}_search.log"
    )
    with open(search_file_name, "r") as fp:
        search_log_file = fp.read().split("\n")[:-1]

    # Words excluded from recommendations.
    fword_file_name = f"{root_path}/ELKSearch/conf/bad_word.txt"
    with open(fword_file_name, "r") as fp:
        bad_word_list = fp.read().split("\n")

    # Drop typo words made of bare Hangul jamo (e.g. "ㅁㄴㅇ").
    today_search_word = []
    for words in search_log_file:
        result = [
            word
            for word in ast.literal_eval(words)
            if re.search("[ㄱ-ㅎㅏ-ㅣ]", word) is None
        ]
        today_search_word = today_search_word + result

    # Apply the bad-word filter, then count occurrences.
    today_search_word = [
        word for word in today_search_word if word not in bad_word_list
    ]
    today_search_word = Counter(today_search_word)

    # check "True": accumulate onto previous counts / "False": reset.
    if config.check == "True":
        query = "SELECT * FROM tb_recommend_keyword"
        recommend_word = select(db, query)[0]

        for word in recommend_word:
            key = word["keyword"]
            cnt = word["count"]
            if key in today_search_word.keys():
                today_search_word[key] = today_search_word[key] + cnt
            else:
                today_search_word[key] = cnt

    for word, cnt in today_search_word.most_common(10):
        # SECURITY/BUG FIX: search words come straight from user input
        # logs; escape single quotes so an apostrophe cannot break the
        # statement or inject SQL (the project's execute() API does not
        # accept bound parameters).
        safe_word = word.replace("'", "''")
        query = (
            "INSERT INTO tb_recommend_keyword(keyword,count,use_yn)"
            f"VALUES ('{safe_word}',{cnt}, 'N') ON CONFLICT (keyword) DO UPDATE "
            f"SET count = {cnt};"
        )
        execute(db, db.cursor(), query)


if __name__ == "__main__":
    main()
diff --git a/API-SERVICE/requirements.txt b/API-SERVICE/requirements.txt
new file mode 100644
index 00000000..f269d1e6
--- /dev/null
+++ b/API-SERVICE/requirements.txt
@@ -0,0 +1,48 @@
+aiohttp==3.8.3
+aiosignal==1.2.0
+anyio==3.6.2
+APScheduler==3.9.1.post1
+asgiref==3.5.2
+async-timeout==4.0.2
+asyncssh==2.12.0
+attrs==22.1.0
+bcrypt==4.0.1
+certifi==2022.9.24
+cffi==1.15.1
+charset-normalizer==2.0.12
+click==8.1.3
+cryptography==38.0.1
+decorator==5.1.1
+ecdsa==0.18.0
+elastic-transport==8.4.0
+elasticsearch==8.4.3
+fastapi==0.75.2
+frozenlist==1.3.1
+h11==0.14.0
+idna==3.4
+jose==1.0.0
+multidict==6.0.2
+paramiko==2.10.3
+passlib==1.7.4
+psycopg2==2.8.6
+py==1.11.0
+pyasn1==0.4.8
+pycparser==2.21
+pydantic==1.10.2
+PyJWT==2.6.0
+PyNaCl==1.5.0
+python-jose==3.3.0
+pytz==2022.5
+pytz-deprecation-shim==0.1.0.post0
+requests==2.27.1
+retry==0.9.2
+rsa==4.9
+six==1.16.0
+sniffio==1.3.0
+starlette==0.17.1
+typing_extensions==4.4.0
+tzdata==2022.7
+tzlocal==4.2
+urllib3==1.26.12
+uvicorn==0.16.0
+yarl==1.8.1
diff --git a/API-SERVICE/requirements2.txt b/API-SERVICE/requirements2.txt
new file mode 100644
index 00000000..3dd5bebf
--- /dev/null
+++ b/API-SERVICE/requirements2.txt
@@ -0,0 +1,46 @@
+aiohttp==3.8.3
+aiosignal==1.2.0
+anyio==3.6.2
+asgiref==3.5.2
+async-timeout==4.0.2
+asyncssh==2.12.0
+attrs==22.1.0
+bcrypt==4.0.1
+certifi==2022.9.24
+cffi==1.15.1
+charset-normalizer==2.0.12
+click==8.1.3
+cryptography==38.0.1
+decorator==5.1.1
+ecdsa==0.18.0
+elastic-transport==8.4.0
+elasticsearch==8.4.3
+fastapi==0.75.2
+frozenlist==1.3.1
+h11==0.14.0
+idna==3.4
+jose==1.0.0
+multidict==6.0.2
+paramiko==2.10.3
+passlib==1.7.4
+psycopg2==2.8.6
+psycopg2-binary==2.9.5
+py==1.11.0
+pyasn1==0.4.8
+pycparser==2.21
+pydantic==1.10.2
+PyJWT==2.6.0
+PyNaCl==1.5.0
+python-dotenv==0.21.0
+python-jose==3.3.0
+pytz==2022.5
+requests==2.27.1
+retry==0.9.2
+rsa==4.9
+six==1.16.0
+sniffio==1.3.0
+starlette==0.17.1
+typing_extensions==4.4.0
+urllib3==1.26.12
+uvicorn==0.16.0
+yarl==1.8.1
diff --git a/API-SERVICE/safe_start.sh b/API-SERVICE/safe_start.sh
new file mode 100644
index 00000000..21d985d5
--- /dev/null
+++ b/API-SERVICE/safe_start.sh
@@ -0,0 +1,66 @@
#!/bin/bash
# safe_start.sh — stop any API-Service instance bound to <host>:<port>,
# then relaunch server.py under the katech_venv interpreter.
# Usage: safe_start.sh [host] [port] [category] [db_type]
app_name=API-Service
router_host=$1
router_port=$2
category=$3
router_db=$4

# Apply defaults for any argument that was not supplied.
input() {
    if [[ $router_host == "" ]];then
        router_host=192.168.100.126
    fi
    if [[ $router_port == "" ]];then
        router_port=9014
    fi
    if [[ $category == "" ]];then
        category=meta
    fi
    if [[ $router_db == "" ]];then
        router_db=test
    fi
}

# Kill the server.py process matching this host/port.
# NOTE(review): kill -9 gives the service no chance to clean up — confirm
# SIGTERM is not sufficient.
router_stop() {
    app=$( ps -ef | grep python | grep server.py | grep ${router_host} | grep ${router_port} | awk '{print $2}' )
    if [[ $app != "" ]];then
        exit_app="kill -9 ${app}"
        echo "Stop Command ( router ) : "${exit_app}
        $exit_app
    else
        echo "Not Found application. ( router )"
    fi
}

# Kill any uvicorn worker still listening on host:port (skips the parent
# python entry reported by netstat).
uvicorn_stop() {
    uvicorn=$( netstat -nlp | grep ${router_host}':'${router_port} | awk '{print $7}' | tr "/" "\n" )
    if [[ $uvicorn != "" ]];then
        for i in $uvicorn
        do
            if [[ ${i} == *python* ]];then
                continue
            fi
            exit_uvicorn="kill -9 ${i}"
            echo "Stop Command ( uvicorn ) : "${exit_uvicorn}
            $exit_uvicorn
        done
    else
        echo "Not Found application. ( uvicorn )"
    fi
}

# Launch server.py detached; stdout/stderr discarded (logging.conf handles logs).
router_start() {
    source_path="$( cd "$( dirname "$0" )" && pwd -P )"
    router_exec="nohup /home/deep/.pyenv/versions/katech_venv/bin/python3 ${source_path}/server.py --host ${router_host} --port ${router_port} --category ${category} --db_type ${router_db} 1> /dev/null 2>&1 &"
    echo "Start Command : ${router_exec}"
    nohup /home/deep/.pyenv/versions/katech_venv/bin/python3 ${source_path}/server.py --host ${router_host} --port ${router_port} --category ${category} --db_type ${router_db} 1> /dev/null 2>&1 &
}

echo "########## Safe Start (${app_name}) ##########"
echo "========== STOP ${app_name} =========="
input

router_stop
sleep 2
uvicorn_stop

echo "========== START ${app_name} =========="
router_start
diff --git a/API-SERVICE/server.py b/API-SERVICE/server.py
new file mode 100644
index 00000000..47a5b0dc
--- /dev/null
+++ b/API-SERVICE/server.py
@@ -0,0 +1,42 @@
+import os
+
+import uvicorn
+from apscheduler.schedulers.background import BackgroundScheduler
+from fastapi import FastAPI
+
+from ApiService import ApiService
+from ApiService.ApiServiceConfig import config
+from Utils.CommonUtil import prepare_config, set_log_path
+
+
# Application wiring: load configuration first, then build the FastAPI app
# and mount the routes exposed by ApiService (presumably assembled from
# conf/<category>/api_config.ini — verify against ApiService).
prepare_config()
api_router = ApiService()
app = FastAPI()
app.include_router(api_router.router)
+
+
@app.on_event("startup")
async def startup():
    """Start the e-mail batch scheduler when running the 'common' service."""
    if config.category == "common":
        # Imported lazily so other service categories skip the batch module.
        from Utils import batch_email
        scheduler = BackgroundScheduler()
        # Poll tb_email_send_info every 5 seconds for 'REQ' rows.
        scheduler.add_job(batch_email.email_handler, "interval", seconds=5, id="sender")
        scheduler.start()
+
+
if __name__ == "__main__":
    # The per-category log directory must exist before uvicorn's rotating
    # file handler (see conf/<category>/logging.conf) tries to open it.
    log_dir = f"{config.root_path}/log/{config.category}"
    if os.path.isdir(log_dir):
        print("Directory Exists")
    else:
        print(f"Make log dir : {log_dir}")
        os.makedirs(log_dir)

    set_log_path()
    uvicorn.run(
        "server:app",
        host=config.server_host,
        port=config.server_port,
        reload=True,  # NOTE(review): auto-reload enabled even outside dev — confirm.
        log_config=f"{config.root_path}/conf/{config.category}/logging.conf",
    )
diff --git a/API-SERVICE/start.sh b/API-SERVICE/start.sh
new file mode 100644
index 00000000..95f4f281
--- /dev/null
+++ b/API-SERVICE/start.sh
@@ -0,0 +1,33 @@
#!/bin/bash
# start.sh — launch API-Service (server.py) in the background.
# Usage: start.sh [host] [port] [category] [db_type]
app_name=API-Service
router_host=$1
router_port=$2
category=$3
router_db=$4

# Apply defaults for any argument that was not supplied.
input() {
    if [[ $router_host == "" ]];then
        router_host=192.168.100.126
    fi
    if [[ $router_port == "" ]];then
        router_port=9014
    fi
    if [[ $category == "" ]];then
        category=meta
    fi
    if [[ $router_db == "" ]];then
        router_db=test
    fi
}

# Launch server.py detached; stdout/stderr discarded (logging.conf handles logs).
router_start() {
    source_path="$( cd "$( dirname "$0" )" && pwd -P )"
    router_exec="nohup python3 ${source_path}/server.py --host ${router_host} --port ${router_port} --category ${category} --db_type ${router_db} 1> /dev/null 2>&1 &"
    echo "Start Command : ${router_exec}"
    nohup python3 ${source_path}/server.py --host ${router_host} --port ${router_port} --category ${category} --db_type ${router_db} 1> /dev/null 2>&1 &
}

echo "########## Start Application (${app_name}) ##########"
echo "========== START ${app_name} =========="
input

router_start
diff --git a/API-SERVICE/stop.sh b/API-SERVICE/stop.sh
new file mode 100644
index 00000000..e4227863
--- /dev/null
+++ b/API-SERVICE/stop.sh
@@ -0,0 +1,47 @@
#!/bin/bash
# stop.sh — stop the API-Service instance bound to <host>:<port>.
# Usage: stop.sh [host] [port]
app_name=API-Service
router_host=$1
router_port=$2

# Apply defaults for any argument that was not supplied.
input() {
    if [[ $router_host == "" ]];then
        router_host=192.168.100.126
    fi
    if [[ $router_port == "" ]];then
        router_port=9014
    fi
}

# Kill the server.py process matching this host/port.
# NOTE(review): kill -9 gives the service no chance to clean up — confirm
# SIGTERM is not sufficient.
router_stop() {
    app=$( ps -ef | grep python | grep server.py | grep ${router_host} | grep ${router_port} | awk '{print $2}' )
    if [[ $app != "" ]];then
        exit_app="kill -9 ${app}"
        echo "Stop Command ( router ) : "${exit_app}
        $exit_app
    else
        echo "Not Found application. ( router )"
    fi
}

# Kill any uvicorn worker still listening on host:port (skips the parent
# python entry reported by netstat).
uvicorn_stop() {
    uvicorn=$( netstat -nlp | grep ${router_host}':'${router_port} | awk '{print $7}' | tr "/" "\n" )
    if [[ $uvicorn != "" ]];then
        for i in $uvicorn
        do
            if [[ ${i} == *python* ]];then
                continue
            fi
            exit_uvicorn="kill -9 ${i}"
            echo "Stop Command ( uvicorn ) : "${exit_uvicorn}
            $exit_uvicorn
        done
    else
        echo "Not Found application. ( uvicorn )"
    fi
}

echo "########## Stop Application (${app_name}) ##########"
echo "========== STOP ${app_name} =========="
input
router_stop
sleep 2
uvicorn_stop
diff --git a/API_ROUTER/Dockerfile b/API_ROUTER/Dockerfile
deleted file mode 100644
index 790f06ce..00000000
--- a/API_ROUTER/Dockerfile
+++ /dev/null
@@ -1,10 +0,0 @@
-FROM python:3.9.15
-
-COPY . /API_ROUTER
-WORKDIR /API_ROUTER
-RUN pip install -r ./requirements.txt
-
-ENV APP_ENV=local
-
-EXPOSE 8000
-CMD ["gunicorn", "-w", "4", "-k", "uvicorn.workers.UvicornWorker", "app.main:app"]
\ No newline at end of file
diff --git a/API_ROUTER/README.md b/API_ROUTER/README.md
new file mode 100644
index 00000000..dbc157d5
--- /dev/null
+++ b/API_ROUTER/README.md
@@ -0,0 +1,4 @@
+개발서버 및 상용서버에서는 다음과 같이 설정 필요
+API_ROUTER/router/.env
+DB_URL=postgresql://{id}:{pwd}@{ip}:{port}/{dbname}
+SCHEMA=schema1, schema2, ...
diff --git a/API_ROUTER/app/common/config.py b/API_ROUTER/app/common/config.py
deleted file mode 100644
index 98e4af68..00000000
--- a/API_ROUTER/app/common/config.py
+++ /dev/null
@@ -1,106 +0,0 @@
-import os
-from functools import lru_cache
-from typing import Union
-
-from pydantic import BaseSettings, SecretStr, PostgresDsn, validator
-
-
-base_dir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
-print(f"base_dir :: {base_dir}")
-
-
-class DBInfo(BaseSettings):
- HOST: str = ""
- PORT: str = ""
- USER: str = ""
- PASS: SecretStr = ""
- BASE: str = ""
-
- def get_dsn(self):
- return ""
-
-
-class PGInfo(DBInfo):
- type: str = "orm"
- SCHEMA: str = ""
-
- def get_dsn(self):
- return str(
- PostgresDsn.build(
- scheme="postgresql",
- host=self.HOST,
- port=self.PORT,
- user=self.USER,
- password=self.PASS.get_secret_value(),
- path=f"/{self.BASE}",
- )
- )
-
-
-class TiberoInfo(DBInfo):
- type: str = "tibero"
-
- def get_dsn(self):
- return f"DSN={self.BASE};UID={self.USER};PWD={self.PASS.get_secret_value()}"
-
-
-class Settings(BaseSettings):
- BASE_DIR = base_dir
- DB_POOL_RECYCLE: int
- DB_ECHO: bool
- RELOAD: bool
- TESTING: bool
-
- DB_INFO: DBInfo = DBInfo()
-
- DB_URL: Union[str, PostgresDsn] = None
-
- @validator("DB_URL", pre=True, always=True)
- def assemble_db_url(cls, v, values):
- if all(value is not None for value in values.values()):
- return values.get("DB_INFO").get_dsn()
- raise ValueError("Not all PostgreSQL database connection values were provided.")
-
-
-class ProdSettings(Settings):
- DB_POOL_RECYCLE: int = 900
- DB_ECHO: bool = True
- RELOAD = False
- TESTING = False
-
- class Config:
- env_file = f"{base_dir}/.env"
- env_file_encoding = "utf-8"
-
-
-class LocalSettings(Settings):
- TESTING: bool = False
- DB_POOL_RECYCLE: int = 900
- DB_ECHO: bool = True
- RELOAD: bool = False
-
- DB_INFO = PGInfo(
- HOST="192.168.100.126", PORT="25432", USER="dpmanager", PASS="hello.dp12#$", BASE="dataportal", SCHEMA="sitemng"
- )
-
- # DB_INFO: TiberoInfo = TiberoInfo(HOST="192.168.101.164", PORT="8629", USER="dhub", PASS="dhub1234", BASE="tibero")
-
-
-class TestSettings(LocalSettings):
- TESTING = True
- RELOAD = True
-
-
-@lru_cache
-def get_settings():
- env = os.getenv("APP_ENV", "prod")
- print(f"env :: {env}")
- return {"local": LocalSettings(), "test": TestSettings(), "prod": ProdSettings()}[env]
-
-
-settings = get_settings()
-
-
-# with open(os.path.join(base_dir, "logging.json")) as f:
-# log_config = json.load(f)
-# logging.config.dictConfig(log_config)
diff --git a/API_ROUTER/app/database/conn.py b/API_ROUTER/app/database/conn.py
deleted file mode 100644
index ce8ece65..00000000
--- a/API_ROUTER/app/database/conn.py
+++ /dev/null
@@ -1,14 +0,0 @@
-from sqlalchemy.orm import declarative_base
-
-from app.common.config import settings
-
-Base = declarative_base()
-db = None
-if settings.DB_INFO.type == "tibero":
- from libs.database.tibero import TiberoConnector
-
- db = TiberoConnector()
-elif settings.DB_INFO.type == "orm":
- from libs.database.orm import SQLAlchemyConnector
-
- db = SQLAlchemyConnector(Base)
diff --git a/API_ROUTER/app/schemas.py b/API_ROUTER/app/schemas.py
deleted file mode 100644
index 4c757bb2..00000000
--- a/API_ROUTER/app/schemas.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from pydantic import BaseModel
-from typing import Optional
-
-
-class RouteInfo(BaseModel):
- url: str
- ip_adr: str
diff --git a/API_ROUTER/docker-compose.yml b/API_ROUTER/docker-compose.yml
deleted file mode 100644
index 012b119c..00000000
--- a/API_ROUTER/docker-compose.yml
+++ /dev/null
@@ -1,9 +0,0 @@
-services:
- api_router:
- image: api-router:1.0
- build:
- context: .
- dockerfile: Dockerfile
- container_name: api-router
- ports:
- - "9010:8000"
\ No newline at end of file
diff --git a/API_ROUTER/logging.json b/API_ROUTER/logging.json
deleted file mode 100644
index 27da2456..00000000
--- a/API_ROUTER/logging.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "version": 1,
- "disable_existing_loggers": false,
- "formatters": {
- "default": {
- "format": "%(asctime)s %(levelname)s [%(filename)s:%(lineno)d] - %(message)s"
- }
- },
- "handlers": {
- "console": {
- "class": "logging.StreamHandler",
- "level": "DEBUG",
- "formatter": "default"
- }
- },
- "loggers": {
- "root": {
- "level": "DEBUG",
- "handlers": ["console"],
- "propagate": false
- }
- }
-}
\ No newline at end of file
diff --git a/API_ROUTER/requirements.txt b/API_ROUTER/requirements.txt
deleted file mode 100644
index c06e9fe4..00000000
--- a/API_ROUTER/requirements.txt
+++ /dev/null
@@ -1,22 +0,0 @@
-aiohttp==3.8.4
-aiosignal==1.3.1
-anyio==3.6.2
-async-timeout==4.0.2
-attrs==23.1.0
-charset-normalizer==3.1.0
-click==8.1.3
-fastapi==0.95.1
-frozenlist==1.3.3
-gunicorn==20.1.0
-h11==0.14.0
-idna==3.4
-multidict==6.0.4
-psycopg2==2.9.6
-pydantic==1.10.7
-python-dotenv==1.0.0
-sniffio==1.3.0
-SQLAlchemy==2.0.9
-starlette==0.26.1
-typing_extensions==4.5.0
-uvicorn==0.21.1
-yarl==1.8.2
diff --git a/API_ROUTER/app/utils/__init__.py b/API_ROUTER/router/app/__init__.py
similarity index 100%
rename from API_ROUTER/app/utils/__init__.py
rename to API_ROUTER/router/app/__init__.py
diff --git a/API_SERVICE/common_service/__init__.py b/API_ROUTER/router/app/common/__init__.py
similarity index 100%
rename from API_SERVICE/common_service/__init__.py
rename to API_ROUTER/router/app/common/__init__.py
diff --git a/API_ROUTER/router/app/common/config.py b/API_ROUTER/router/app/common/config.py
new file mode 100644
index 00000000..4ad35178
--- /dev/null
+++ b/API_ROUTER/router/app/common/config.py
@@ -0,0 +1,113 @@
+import logging.config
+import os
+from functools import lru_cache
+
+from pydantic import BaseSettings, PostgresDsn
+
+
+base_dir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+print(f"router base_dir :: {base_dir}")
+
+
+class DBInfo(BaseSettings):
+ DB_POOL_RECYCLE: int = 900
+ DB_ECHO: bool = True
+ DB_URL: str
+
+
+class PGInfo(DBInfo):
+ SCHEMA: str
+
+ class Config:
+ env_file = f"{base_dir}/.env"
+ env_file_encoding = "utf-8"
+
+ """
+ pydantic validator를 통한 변수할당 예시
+ @validator("DB_URL", pre=True, always=True)
+ def assemble_db_url(cls, v, values):
+ if all(value is not None for value in values.values()):
+ return values.get("DB_INFO").get_dsn()
+ raise ValueError("Not all PostgreSQL database connection values were provided.")
+ """
+
+
+class Settings(BaseSettings):
+ BASE_DIR = base_dir
+ RELOAD: bool
+ TESTING: bool
+
+ DB_INFO: DBInfo
+
+
+class ProdSettings(Settings):
+ RELOAD = False
+ TESTING = False
+
+ DB_INFO: PGInfo = PGInfo()
+
+
+class LocalSettings(Settings):
+ TESTING: bool = False
+ RELOAD: bool = False
+
+ DB_INFO: PGInfo = PGInfo(
+ DB_POOL_RECYCLE=900,
+ DB_ECHO=False,
+ SCHEMA="sitemng,users,meta,iag,ckan,board,analysis",
+ DB_URL=str(
+ PostgresDsn.build(
+ scheme="postgresql",
+ host="localhost",
+ port="5432",
+ user="dpmanager",
+ password="hello.dp12#$",
+ path="/dataportal",
+ )
+ ),
+ )
+
+
+class TestSettings(LocalSettings):
+ TESTING = True
+ RELOAD = True
+
+
+@lru_cache
+def get_settings() -> Settings:
+ env = os.getenv("APP_ENV", "prod")
+ print(f"env :: {env}")
+ return {"local": LocalSettings(), "test": TestSettings(), "prod": ProdSettings()}[env]
+
+
+settings = get_settings()
+print(settings)
+
+log_config = {
+ "version": 1,
+ "disable_existing_loggers": False,
+ "formatters": {
+ "standard": {"format": "%(asctime)s %(levelname)s [%(filename)s:%(lineno)d] - %(message)s"},
+ },
+ "handlers": {
+ "console_handler": {
+ "class": "logging.StreamHandler",
+ "level": "DEBUG",
+ "formatter": "standard",
+ },
+ },
+ "root": {"level": "DEBUG", "handlers": ["console_handler"], "propagate": False},
+}
+
+if "prod" == os.getenv("APP_ENV", "prod"):
+ log_config["handlers"]["file_handler"] = {
+ "class": "logging.handlers.RotatingFileHandler",
+ "filename": os.path.join(base_dir, "log", "router.log"),
+ "mode": "a",
+ "maxBytes": 20000000,
+ "backupCount": 10,
+ "level": "INFO",
+ "formatter": "standard",
+ }
+ log_config["root"]["handlers"].append("file_handler")
+logging.config.dictConfig(log_config)
diff --git a/API_ROUTER/app/common/const.py b/API_ROUTER/router/app/common/const.py
similarity index 100%
rename from API_ROUTER/app/common/const.py
rename to API_ROUTER/router/app/common/const.py
diff --git a/API_ROUTER/router/app/database/conn.py b/API_ROUTER/router/app/database/conn.py
new file mode 100644
index 00000000..a3b7b298
--- /dev/null
+++ b/API_ROUTER/router/app/database/conn.py
@@ -0,0 +1,7 @@
+from sqlalchemy.ext.automap import automap_base
+
+from libs.database.orm import SQLAlchemyConnector
+
+Base = automap_base()
+
+db = SQLAlchemyConnector(Base)
diff --git a/API_ROUTER/app/database/models.py b/API_ROUTER/router/app/database/models.py
similarity index 96%
rename from API_ROUTER/app/database/models.py
rename to API_ROUTER/router/app/database/models.py
index 85d23197..60988036 100644
--- a/API_ROUTER/app/database/models.py
+++ b/API_ROUTER/router/app/database/models.py
@@ -1,7 +1,7 @@
from sqlalchemy import String, Column, ForeignKey, PrimaryKeyConstraint
from sqlalchemy.orm import relationship
-from app.database.conn import Base
+from router.app.database.conn import Base
from libs.database.models import BaseMixin
diff --git a/API_ROUTER/app/exceptions/exception.py b/API_ROUTER/router/app/exceptions/exception.py
similarity index 100%
rename from API_ROUTER/app/exceptions/exception.py
rename to API_ROUTER/router/app/exceptions/exception.py
diff --git a/API_ROUTER/app/main.py b/API_ROUTER/router/app/main.py
similarity index 81%
rename from API_ROUTER/app/main.py
rename to API_ROUTER/router/app/main.py
index e2a77bcc..7e7c934b 100644
--- a/API_ROUTER/app/main.py
+++ b/API_ROUTER/router/app/main.py
@@ -3,9 +3,9 @@
import uvicorn
from fastapi import FastAPI
-from app.common.config import settings
-from app.database.conn import db
-from app.routes import index
+from router.app.common.config import settings
+from router.app.database.conn import db
+from router.app.routes import index
logger = logging.getLogger()
diff --git a/API_SERVICE/common_service/common/__init__.py b/API_ROUTER/router/app/routes/__init__.py
similarity index 100%
rename from API_SERVICE/common_service/common/__init__.py
rename to API_ROUTER/router/app/routes/__init__.py
diff --git a/API_ROUTER/app/routes/index.py b/API_ROUTER/router/app/routes/index.py
similarity index 62%
rename from API_ROUTER/app/routes/index.py
rename to API_ROUTER/router/app/routes/index.py
index 55b5110d..cfb7f55d 100644
--- a/API_ROUTER/app/routes/index.py
+++ b/API_ROUTER/router/app/routes/index.py
@@ -1,14 +1,15 @@
import copy
import json
import logging
+from datetime import datetime
import aiohttp
from fastapi import APIRouter, Depends
from starlette.requests import Request
from starlette.responses import JSONResponse
-from app.common import const
-from app.database.conn import db
+from router.app.common import const
+from router.app.database.conn import db
from libs.database.connector import Executor
logger = logging.getLogger()
@@ -16,19 +17,22 @@
router = APIRouter()
-@router.get("/route/common/me")
+@router.get("/me")
async def me(request: Request):
- return {"result": 1, "errorMesage": "", "data": request.scope["client"][0]}
+ return {"result": 1, "errorMessage": "", "data": request.scope["client"][0]}
-@router.api_route("{route_path:path}", methods=["GET", "POST"])
+@router.api_route("{route_path:path}", methods=["GET", "POST", "PUT", "DELETE"])
async def index(request: Request, route_path: str, session: Executor = Depends(db.get_db)):
method = request.method
headers = get_headers(request.headers)
- query_params = request.query_params
+ query_params = {
+ "workip": request.scope["client"][0],
+ "workdt": datetime.now().strftime("%Y%m%d%H%M%S"),
+ }
+ query_params.update(request.query_params.items())
data = None
- status = 200
- if method == "POST":
+ if method in ["POST", "PUT", "DELETE"]:
try:
data = await request.json()
except json.JSONDecodeError:
@@ -38,22 +42,22 @@ async def index(request: Request, route_path: str, session: Executor = Depends(d
if not row:
logger.error(f"API INFO NOT FOUND, url :: {route_path}, method :: {method}")
- return JSONResponse(content={"result": 0, "errorMessage": "API INFO NOT FOUND."}, status_code=404)
+ return JSONResponse(
+ content={"result": 0, "errorMessage": "API INFO NOT FOUND."},
+ status_code=404,
+ )
logger.info(f"API :: {row}")
remote_url = "http://" + row[const.ROUTE_IP_FIELD] + row[const.ROUTE_API_URL_FIELD]
- cookies = {}
- try:
- cookies, result, status = await request_to_service(remote_url, method, query_params, data, headers)
- except Exception as e:
- logger.error(e, exc_info=True)
- result = {"result": 0, "errorMessage": type(e).__name__}
+ cookies, result, status = await request_to_service(remote_url, method, query_params, data, headers)
response = JSONResponse(content=result, status_code=status)
- for k, v in cookies.items():
- response.set_cookie(key=k, value=v, max_age=3600, secure=False, httponly=True)
+ if cookies:
+ for k, v in cookies.items():
+ logger.debug(f"k :: {k} {type(k)}, v :: {v} {type(v)}")
+ response.set_cookie(key=k, value=v, domain=v.get("domain"), max_age=3600, secure=False, httponly=False)
return response
diff --git a/API_SERVICE/common_service/database/__init__.py b/API_ROUTER/router/app/utils/__init__.py
similarity index 100%
rename from API_SERVICE/common_service/database/__init__.py
rename to API_ROUTER/router/app/utils/__init__.py
diff --git a/API_ROUTER/router/gunicorn.conf.py b/API_ROUTER/router/gunicorn.conf.py
new file mode 100644
index 00000000..21d05a03
--- /dev/null
+++ b/API_ROUTER/router/gunicorn.conf.py
@@ -0,0 +1,246 @@
+# Gunicorn configuration file.
+#
+# Server socket
+#
+# bind - The socket to bind.
+#
+# A string of the form: 'HOST', 'HOST:PORT', 'unix:PATH'.
+# An IP is a valid HOST.
+#
+# backlog - The number of pending connections. This refers
+# to the number of clients that can be waiting to be
+# served. Exceeding this number results in the client
+# getting an error when attempting to connect. It should
+# only affect servers under significant load.
+#
+# Must be a positive integer. Generally set in the 64-2048
+# range.
+#
+import os
+
+bind = "0.0.0.0:8000"
+backlog = 2048
+
+#
+# Worker processes
+#
+# workers - The number of worker processes that this server
+# should keep alive for handling requests.
+#
+# A positive integer generally in the 2-4 x $(NUM_CORES)
+# range. You'll want to vary this a bit to find the best
+# for your particular application's work load.
+#
+# worker_class - The type of workers to use. The default
+# sync class should handle most 'normal' types of work
+# loads. You'll want to read
+# http://docs.gunicorn.org/en/latest/design.html#choosing-a-worker-type
+# for information on when you might want to choose one
+# of the other worker classes.
+#
+# A string referring to a Python path to a subclass of
+# gunicorn.workers.base.Worker. The default provided values
+# can be seen at
+# http://docs.gunicorn.org/en/latest/settings.html#worker-class
+#
+# worker_connections - For the eventlet and gevent worker classes
+# this limits the maximum number of simultaneous clients that
+# a single process can handle.
+#
+# A positive integer generally set to around 1000.
+#
+# timeout - If a worker does not notify the master process in this
+# number of seconds it is killed and a new worker is spawned
+# to replace it.
+#
+# Generally set to thirty seconds. Only set this noticeably
+# higher if you're sure of the repercussions for sync workers.
+# For the non sync workers it just means that the worker
+# process is still communicating and is not tied to the length
+# of time required to handle a single request.
+#
+# keepalive - The number of seconds to wait for the next request
+# on a Keep-Alive HTTP connection.
+#
+# A positive integer. Generally set in the 1-5 seconds range.
+#
+# reload - Restart workers when code changes.
+#
+# This setting is intended for development. It will cause
+# workers to be restarted whenever application code changes.
+workers = 3
+threads = 3
+worker_class = "uvicorn.workers.UvicornWorker"
+worker_connections = 1000
+timeout = 60
+keepalive = 2
+reload = False
+
+#
+# spew - Install a trace function that spews every line of Python
+# that is executed when running the server. This is the
+# nuclear option.
+#
+# True or False
+#
+
+spew = False
+
+#
+# Server mechanics
+#
+# daemon - Detach the main Gunicorn process from the controlling
+# terminal with a standard fork/fork sequence.
+#
+# True or False
+#
+# raw_env - Pass environment variables to the execution environment.
+#
+# pidfile - The path to a pid file to write
+#
+# A path string or None to not write a pid file.
+#
+# user - Switch worker processes to run as this user.
+#
+# A valid user id (as an integer) or the name of a user that
+# can be retrieved with a call to pwd.getpwnam(value) or None
+# to not change the worker process user.
+#
+# group - Switch worker process to run as this group.
+#
+# A valid group id (as an integer) or the name of a user that
+# can be retrieved with a call to pwd.getgrnam(value) or None
+# to change the worker processes group.
+#
+# umask - A mask for file permissions written by Gunicorn. Note that
+# this affects unix socket permissions.
+#
+# A valid value for the os.umask(mode) call or a string
+# compatible with int(value, 0) (0 means Python guesses
+# the base, so values like "0", "0xFF", "0022" are valid
+# for decimal, hex, and octal representations)
+#
+# tmp_upload_dir - A directory to store temporary request data when
+# requests are read. This will most likely be disappearing soon.
+#
+# A path to a directory where the process owner can write. Or
+# None to signal that Python should choose one on its own.
+#
+
+daemon = False
+pidfile = "./gunicorn-router.pid"
+umask = 0
+user = None
+group = None
+tmp_upload_dir = None
+
+#
+# Logging
+#
+# logfile - The path to a log file to write to.
+#
+# A path string. "-" means log to stdout.
+#
+# loglevel - The granularity of log output
+#
+# A string of "debug", "info", "warning", "error", "critical"
+#
+
+
+def get_log_path():
+ import os
+
+ path_ = os.path.join(os.path.dirname(os.path.abspath(__file__)), "log")
+ if not os.path.exists(path_):
+ os.makedirs(path_)
+ print(f"make dir {path_}")
+
+ return path_
+
+
+app_env = os.getenv("APP_ENV", "prod")
+if app_env == "prod":
+ loglevel = "info"
+ log_name = "gunicorn-router"
+ log_dir_path = get_log_path()
+ logfile = os.path.join(log_dir_path, log_name + ".log")
+ errorlog = os.path.join(log_dir_path, log_name + "-error.log")
+ accesslog = logfile
+else:
+ loglevel = "debug"
+ logfile = "-"
+ errorlog = "-"
+ accesslog = "-"
+
+access_log_format = '%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s"'
+
+#
+# Process naming
+#
+# proc_name - A base to use with setproctitle to change the way
+# that Gunicorn processes are reported in the system process
+# table. This affects things like 'ps' and 'top'. If you're
+# going to be running more than one instance of Gunicorn you'll
+# probably want to set a name to tell them apart. This requires
+# that you install the setproctitle module.
+#
+# A string or None to choose a default of something like 'gunicorn'.
+#
+
+proc_name = "API-Router"
+
+
+#
+# Server hooks
+#
+# post_fork - Called just after a worker has been forked.
+#
+# A callable that takes a server and worker instance
+# as arguments.
+#
+# pre_fork - Called just prior to forking the worker subprocess.
+#
+# A callable that accepts the same arguments as after_fork
+#
+# pre_exec - Called just prior to forking off a secondary
+# master process during things like config reloading.
+#
+# A callable that takes a server instance as the sole argument.
+#
+
+
+def post_fork(server, worker):
+ server.log.info("Worker spawned (pid: %s)", worker.pid)
+
+
+def pre_fork(server, worker):
+ pass
+
+
+def pre_exec(server):
+ server.log.info("Forked child, re-executing.")
+
+
+def when_ready(server):
+ server.log.info("Server is ready. Spawning workers")
+
+
+def worker_int(worker):
+ worker.log.info("worker received INT or QUIT signal")
+
+ # get traceback info
+ import threading, sys, traceback
+
+ id2name = {th.ident: th.name for th in threading.enumerate()}
+ code = []
+ for threadId, stack in sys._current_frames().items():
+ code.append("\n# Thread: %s(%d)" % (id2name.get(threadId, ""), threadId))
+ for filename, lineno, name, line in traceback.extract_stack(stack):
+ code.append('File: "%s", line %d, in %s' % (filename, lineno, name))
+ if line:
+ code.append(" %s" % (line.strip()))
+ worker.log.debug("\n".join(code))
+
+
+def worker_abort(worker):
+ worker.log.info("worker received SIGABRT signal")
diff --git a/API_ROUTER/gunicorn.sh b/API_ROUTER/router/gunicorn.sh
similarity index 94%
rename from API_ROUTER/gunicorn.sh
rename to API_ROUTER/router/gunicorn.sh
index b3b47c04..f50cdb72 100755
--- a/API_ROUTER/gunicorn.sh
+++ b/API_ROUTER/router/gunicorn.sh
@@ -7,7 +7,7 @@ echo $pid_path
# gunicorn 실행 명령어
start_gunicorn() {
- gunicorn app.main:app --bind 0.0.0.0:8010 -c gunicorn.conf.py -D --pid $pid_path
+ gunicorn app.main:app --bind 0.0.0.0:9010 -c gunicorn.conf.py -D --pid $pid_path
sleep 2
pid=$(cat $pid_path)
echo "Gunicorn started. PID: $pid"
diff --git a/API_ROUTER/router/requirements.txt b/API_ROUTER/router/requirements.txt
new file mode 100644
index 00000000..b76eb6f1
--- /dev/null
+++ b/API_ROUTER/router/requirements.txt
@@ -0,0 +1,56 @@
+aiohttp==3.8.4
+aiosignal==1.3.1
+anyio==3.6.2
+APScheduler==3.10.1
+async-timeout==4.0.2
+attrs==23.1.0
+bcrypt==4.0.1
+boto3==1.28.67
+botocore==1.31.67
+certifi==2022.12.7
+cffi==1.15.1
+charset-normalizer==3.1.0
+click==8.1.3
+cryptography==40.0.2
+elastic-transport==8.4.0
+elasticsearch==8.7.0
+exceptiongroup==1.1.2
+fastapi==0.95.1
+frozenlist==1.3.3
+greenlet==2.0.2
+gunicorn==20.1.0
+h11==0.14.0
+httpcore==0.17.3
+httpx==0.24.1
+idna==3.4
+iniconfig==2.0.0
+jmespath==1.0.1
+multidict==6.0.4
+numpy==1.26.0
+packaging==23.1
+pandas==2.1.0
+passlib==1.7.4
+Pillow==10.0.1
+pluggy==1.2.0
+psycopg2-binary==2.9.9
+pycparser==2.21
+pycryptodome==3.18.0
+pydantic==1.10.7
+PyJWT==2.7.0
+pytest==7.4.0
+python-dateutil==2.8.2
+python-dotenv==1.0.0
+pytz==2023.3
+requests==2.31.0
+s3transfer==0.7.0
+six==1.16.0
+sniffio==1.3.0
+SQLAlchemy==2.0.22
+starlette==0.26.1
+tomli==2.0.1
+typing_extensions==4.5.0
+tzdata==2023.3
+tzlocal==5.0.1
+urllib3==1.26.15
+uvicorn==0.21.1
+yarl==1.8.2
diff --git a/API_SERVICE/README.md b/API_SERVICE/README.md
index e69de29b..15938d7b 100644
--- a/API_SERVICE/README.md
+++ b/API_SERVICE/README.md
@@ -0,0 +1,18 @@
+## [katech-dev]
+- emailAthnCnfm.py
+- emailAthnPass.py
+- emailAthnSend.py
+- emailDataShare.py
+
+## [katech]
+dir : ~/common_service/route/v1/email.py
+
+## [개발 및 상용서버 환경설정]
+
+service_name = common_service | login_service | meta_service | ...
+
+API_SERVICE/{service_name}/.env
+
+DB_URL=postgresql://{id}:{pwd}@{ip}:{port}/{dbname}
+SCHEMA=schema1, schema2, ...
+
\ No newline at end of file
diff --git a/API_SERVICE/meta_service/ELKSearch/README.md b/API_SERVICE/batch_service/app/ELKSearch/README.md
similarity index 100%
rename from API_SERVICE/meta_service/ELKSearch/README.md
rename to API_SERVICE/batch_service/app/ELKSearch/README.md
diff --git a/API_SERVICE/common_service/routes/__init__.py b/API_SERVICE/batch_service/app/ELKSearch/Utils/__init__.py
similarity index 100%
rename from API_SERVICE/common_service/routes/__init__.py
rename to API_SERVICE/batch_service/app/ELKSearch/Utils/__init__.py
diff --git a/API_SERVICE/meta_service/ELKSearch/Utils/base.py b/API_SERVICE/batch_service/app/ELKSearch/Utils/base.py
similarity index 76%
rename from API_SERVICE/meta_service/ELKSearch/Utils/base.py
rename to API_SERVICE/batch_service/app/ELKSearch/Utils/base.py
index f167133f..1346c3ce 100644
--- a/API_SERVICE/meta_service/ELKSearch/Utils/base.py
+++ b/API_SERVICE/batch_service/app/ELKSearch/Utils/base.py
@@ -3,8 +3,8 @@
from elasticsearch import Elasticsearch
-def set_els(server_info):
- return Elasticsearch(f"http://{server_info.host}:{server_info.port}")
+def set_els(host: str, port: int):
+ return Elasticsearch(f"http://{host}:{port}")
def make_format(key, inner_key, value) -> dict:
diff --git a/API_SERVICE/meta_service/ELKSearch/Utils/document_utils.py b/API_SERVICE/batch_service/app/ELKSearch/Utils/document_utils.py
similarity index 100%
rename from API_SERVICE/meta_service/ELKSearch/Utils/document_utils.py
rename to API_SERVICE/batch_service/app/ELKSearch/Utils/document_utils.py
diff --git a/API_SERVICE/common_service/routes/v1/__init__.py b/API_SERVICE/batch_service/app/ELKSearch/__init__.py
similarity index 100%
rename from API_SERVICE/common_service/routes/v1/__init__.py
rename to API_SERVICE/batch_service/app/ELKSearch/__init__.py
diff --git a/API_SERVICE/meta_service/ELKSearch/document.py b/API_SERVICE/batch_service/app/ELKSearch/document.py
similarity index 95%
rename from API_SERVICE/meta_service/ELKSearch/document.py
rename to API_SERVICE/batch_service/app/ELKSearch/document.py
index ef1085f7..ccf059d1 100644
--- a/API_SERVICE/meta_service/ELKSearch/document.py
+++ b/API_SERVICE/batch_service/app/ELKSearch/document.py
@@ -1,5 +1,5 @@
-from meta_service.ELKSearch.Utils.base import make_format
-from meta_service.ELKSearch.Utils.document_utils import set_source
+from batch_service.app.ELKSearch.Utils.base import make_format
+from batch_service.app.ELKSearch.Utils.document_utils import set_source
class DocumentManager:
diff --git a/API_SERVICE/batch_service/app/ELKSearch/index.py b/API_SERVICE/batch_service/app/ELKSearch/index.py
new file mode 100644
index 00000000..17cda571
--- /dev/null
+++ b/API_SERVICE/batch_service/app/ELKSearch/index.py
@@ -0,0 +1,39 @@
+import os
+import json
+# Path to the directory containing this ELKSearch package
+ELKSearch_PATH = os.path.dirname(os.path.abspath(__file__))
+
+
+class Index:
+ def __init__(self, connect):
+ self.connect = connect
+
+ def all_index(self) -> dict:
+ """
+ :return: key값이 index명, value는 alias 설정 값이다
+ """
+ return self.connect.indices.get_alias(index="*")
+
+ def create(self, index: str, path: str = None) -> dict:
+ """
+ :param index: 생성할 index 이름
+ :param path: 생성할 index의 mapping 폴더 위치
+ :return: els에 요청한 결과 성공/실패는 bool 타입으로 반환 된다
+ """
+ file_name = f"{index}.json"
+ if path is None:
+ path = f"{ELKSearch_PATH}/mapping/{file_name}"
+ else:
+ path = os.path.join(path, file_name)
+
+ with open(path, "r") as fp:
+ mapping = json.load(fp)
+
+ return self.connect.indices.create(index=index, body=mapping)
+
+ def delete(self, index: str) -> dict:
+ """
+ :param index: 삭제할 index의 이름
+ :return: els에 요청한 결과 성공/실패는 bool 타입으로 반환 된다
+ """
+ return self.connect.indices.delete(index=index, ignore=[400,404])
diff --git a/API_SERVICE/meta_service/ELKSearch/model.py b/API_SERVICE/batch_service/app/ELKSearch/model.py
similarity index 100%
rename from API_SERVICE/meta_service/ELKSearch/model.py
rename to API_SERVICE/batch_service/app/ELKSearch/model.py
diff --git a/API_SERVICE/login_service/__init__.py b/API_SERVICE/batch_service/app/__init__.py
similarity index 100%
rename from API_SERVICE/login_service/__init__.py
rename to API_SERVICE/batch_service/app/__init__.py
diff --git a/API_SERVICE/login_service/common/__init__.py b/API_SERVICE/batch_service/app/common/__init__.py
similarity index 100%
rename from API_SERVICE/login_service/common/__init__.py
rename to API_SERVICE/batch_service/app/common/__init__.py
diff --git a/API_SERVICE/batch_service/app/common/config.py b/API_SERVICE/batch_service/app/common/config.py
new file mode 100644
index 00000000..69eb0b10
--- /dev/null
+++ b/API_SERVICE/batch_service/app/common/config.py
@@ -0,0 +1,156 @@
+import logging.config
+import os
+from functools import lru_cache
+
+from pydantic import BaseSettings, PostgresDsn, Field
+
+base_dir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+print(f"batch base_dir :: {base_dir}")
+
+
+class DBInfo(BaseSettings):
+ DB_POOL_RECYCLE: int = Field(default=900)
+ DB_ECHO: bool = Field(default=False)
+
+
+class PGInfo(DBInfo):
+ DB_URL: str
+ SCHEMA: str
+
+    class Config:  # TODO: read each env var individually instead of relying on Config
+ env_file = f"{base_dir}/.env"
+ env_file_encoding = "utf-8"
+
+
+class SeoulPGInfo(PGInfo):
+ SEOUL_DB_URL: str = Field(..., validation_alias="DB_URL")
+ SEOUL_SCHEMA: str = Field(..., validation_alias="SCHEMA")
+
+
+class ELSInfo(BaseSettings):
+ ELS_HOST: str = Field(..., alias="host")
+ ELS_PORT: int = Field(..., alias="port")
+
+ class Config:
+ env_file = f"{base_dir}/.env"
+ env_file_encoding = "utf-8"
+
+
+class Settings(BaseSettings):
+ BASE_DIR = base_dir
+ RELOAD: bool
+ TESTING: bool
+
+ EMAIL_ADDR: str
+ EMAIL_PASSWORD: str
+ SMTP_SERVER: str
+ SMTP_PORT: str
+
+ DB_INFO: DBInfo
+ SEOUL_DB_INFO: SeoulPGInfo
+ ELS_INFO: ELSInfo
+
+
+class ProdSettings(Settings):
+ SERVICE = "batch"
+ RELOAD = False
+ TESTING = False
+
+ DB_INFO = PGInfo()
+ SEOUL_DB_INFO = SeoulPGInfo()
+ ELS_INFO = ELSInfo()
+
+ class Config:
+ env_file = f"{base_dir}/.env"
+ env_file_encoding = "utf-8"
+
+
+class LocalSettings(Settings):
+ SERVICE = "batch"
+ TESTING: bool = False
+ RELOAD: bool = False
+
+ SMTP_SERVER = "smtp.office365.com"
+ SMTP_PORT = "587"
+ EMAIL_ADDR = "admin@bigdata-car.kr"
+ EMAIL_PASSWORD = "Pas07054354@katech!"
+
+ DB_INFO = PGInfo(
+ DB_POOL_RECYCLE=900,
+ DB_ECHO=False,
+ SCHEMA="sitemng,users,meta,iag,ckan,board,analysis",
+ DB_URL=str(
+ PostgresDsn.build(
+ scheme="postgresql",
+ host="192.168.100.126",
+ port="25432",
+ user="dpmanager",
+ password="hello.dp12#$",
+ path="/dataportal",
+ )
+ ),
+ )
+
+ SEOUL_DB_INFO = SeoulPGInfo(
+ DB_POOL_RECYCLE=900,
+ DB_ECHO=False,
+ SCHEMA="public",
+ DB_URL=str(
+ PostgresDsn.build(
+ scheme="postgresql",
+ host="147.47.200.145",
+ port="34543",
+ user="openplatform",
+ password="openplatform",
+ path="/katechdb",
+ )
+ ),
+ )
+
+ ELS_INFO = ELSInfo(host="192.168.101.44", port=39200)
+
+
+class TestSettings(LocalSettings):
+ SERVICE = "batch"
+ TESTING = True
+ RELOAD = True
+
+
+@lru_cache
+def get_settings() -> Settings:
+ env = os.getenv("APP_ENV", "prod")
+ print(env)
+ return {"local": LocalSettings(), "test": TestSettings(), "prod": ProdSettings()}[env]
+
+
+settings = get_settings()
+print(settings)
+
+log_config = {
+ "version": 1,
+ "disable_existing_loggers": False,
+ "formatters": {
+ "standard": {"format": "%(asctime)s %(levelname)s [%(filename)s:%(lineno)d] - %(message)s"},
+ },
+ "handlers": {
+ "console_handler": {
+ "class": "logging.StreamHandler",
+ "level": "DEBUG",
+ "formatter": "standard",
+ },
+ },
+ "root": {"level": "DEBUG", "handlers": ["console_handler"], "propagate": False},
+}
+
+if "prod" == os.getenv("APP_ENV", "prod"):
+ log_config["handlers"]["file_handler"] = {
+ "class": "logging.handlers.RotatingFileHandler",
+ "filename": os.path.join(base_dir, "log", "batch.log"),
+ "mode": "a",
+ "maxBytes": 20000000,
+ "backupCount": 10,
+ "level": "INFO",
+ "formatter": "standard",
+ }
+ log_config["root"]["handlers"].append("file_handler")
+logging.config.dictConfig(log_config)
diff --git a/API_SERVICE/batch_service/app/common/const.py b/API_SERVICE/batch_service/app/common/const.py
new file mode 100644
index 00000000..ebf1581e
--- /dev/null
+++ b/API_SERVICE/batch_service/app/common/const.py
@@ -0,0 +1,85 @@
+import os
+
+from batch_service.app.common.config import base_dir
+from libs.database.dml_controller import Base
+
+# recommend
+log_dir = f"{os.path.dirname(os.path.dirname(base_dir))}/API_SERVICE/meta_service/log"
+
+# send_email
+template_dir = os.path.join(base_dir, "resources", "template")
+msg_setting = {
+ "register": {
+ "tmplt": os.path.join(template_dir, "emailAthnSend.html"),
+ "sub": "[자동차데이터포털]회원가입을 위한 인증 메일입니다.",
+ },
+ "password": {
+ "tmplt": os.path.join(template_dir, "pwdEmailAthn.html"),
+ "sub": "[자동차데이터포털]비밀번호 변경을 위한 인증 메일입니다.",
+ },
+ "share": {
+ "tmplt": os.path.join(template_dir, "shareEmail.html"),
+ "sub": "[자동차데이터포털] 자동차데이터포털에서 공유한 데이터입니다.",
+ },
+ "noty": {
+ "tmplt": os.path.join(template_dir, "notyEmail.html"),
+ "sub": "[자동차데이터포털] 자동차데이터포털에서 보내는 알림 메일입니다.",
+ },
+ "analysisRequest": {
+ "tmplt": os.path.join(template_dir, "analysisRequestEmail.html"),
+ "sub": "[자동차데이터포털] {0} 신청 메일입니다.",
+ },
+ "toolApply": {
+ "tmplt": os.path.join(template_dir, "toolApplyEmail.html"),
+ "sub": "[자동차데이터포털] {0} 신청 메일입니다.",
+ },
+}
+
+
+class EmailSendInfoTable(Base):
+ table_nm = "tb_email_send_info"
+ key_column = "email_id"
+
+ def get_query_data(self, st_time) -> dict:
+ return {
+ "table_nm": self.table_nm,
+ "where_info": [
+ {
+ "table_nm": self.table_nm,
+ "key": "sttus",
+ "value": "REQ",
+ "compare_op": "=",
+ "op": "",
+ },
+ {"table_nm": self.table_nm, "key": "reg_date", "value": st_time, "compare_op": ">=", "op": "AND"},
+ ],
+ }
+
+
+# els update
+class BizDataTable(Base):
+ table_nm = "v_biz_meta_info"
+ key_column = "status"
+
+
+class CkanDataTable(Base):
+    # Overseas (non-domestic) datasets
+ table_nm = "v_biz_meta_oversea_els"
+ key_column = "biz_dataset_id"
+
+
+# Recommended search keywords
+class RecommendKeyTable(Base):
+ table_nm = "tb_recommend_keyword"
+ key_column = "keyword"
+
+
+# Seoul National University (SNU) datasets
+class SeoulDataKor(Base):
+ table_nm = "tbdataset_total_95_kor"
+ key_column = "ds_id"
+
+
+class SeoulDataWorld(Base):
+ table_nm = "tbdataset_total_95_world"
+ key_column = "ds_id"
diff --git a/API_SERVICE/batch_service/app/common/utils.py b/API_SERVICE/batch_service/app/common/utils.py
new file mode 100644
index 00000000..5567651b
--- /dev/null
+++ b/API_SERVICE/batch_service/app/common/utils.py
@@ -0,0 +1,48 @@
+import re
+from copy import deepcopy
+
+from batch_service.app.ELKSearch.Utils.base import set_els
+from batch_service.app.ELKSearch.document import DocumentManager
+from batch_service.app.ELKSearch.index import Index
+
+
+def default_search_set(host, port, index, size=10, from_=0):
+ """
+ 검색에 필요한 default 세팅
+ 자동완성과 검색에 사용
+ """
+ es = set_els(host, port)
+ docmanger = DocumentManager(es, index)
+ docmanger.set_pagination(size, from_)
+ return docmanger
+
+
+def index_set(host, port):
+ es = set_els(host, port)
+ return Index(es)
+
+
+def data_process(data):
+ pre_data = deepcopy(data)
+ for k, v in data.items():
+ if not v:
+ continue
+
+ if k in ["ctgry", "data_shap", "data_prv_desk"]:
+ key = f"re_{k}"
+ pre_data[key] = re.sub("[ ]", "", str(v))
+
+ if isinstance(v, str):
+ match = re.match(r"(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2})\.(\d+)", v)
+ if match:
+ date_time_field = match.group(1).replace(" ", "T")
+ micro_time_field = match.group(2)
+
+ if "+" in micro_time_field:
+ micro_time_field = micro_time_field.split("+")[0]
+ if len(micro_time_field) < 6:
+ micro_time_field = micro_time_field + "0"
+
+ pre_data[k] = f"{date_time_field}.{micro_time_field}"
+
+ return {"_id": pre_data["biz_dataset_id"], "_source": pre_data}
diff --git a/API_SERVICE/login_service/database/__init__.py b/API_SERVICE/batch_service/app/database/__init__.py
similarity index 100%
rename from API_SERVICE/login_service/database/__init__.py
rename to API_SERVICE/batch_service/app/database/__init__.py
diff --git a/API_SERVICE/batch_service/app/database/conn.py b/API_SERVICE/batch_service/app/database/conn.py
new file mode 100644
index 00000000..05ffeac9
--- /dev/null
+++ b/API_SERVICE/batch_service/app/database/conn.py
@@ -0,0 +1,6 @@
from sqlalchemy.ext.automap import automap_base
from libs.database.orm import SQLAlchemyConnector

# Reflected (automap) declarative base shared by the app's models.
Base = automap_base()
# Primary connector; initialised later via db.init_app() in app.main.
db = SQLAlchemyConnector(Base)
# NOTE(review): `seoul_db` is disabled (Seoul-DB connection errors), but
# jobs/seoul_db_upload.py and routes/v1/*.py still import it — those
# modules raise ImportError unless this line is restored first.
# seoul_db = SQLAlchemyConnector(Base)
diff --git a/API_SERVICE/common_service/database/models.py b/API_SERVICE/batch_service/app/database/models.py
similarity index 100%
rename from API_SERVICE/common_service/database/models.py
rename to API_SERVICE/batch_service/app/database/models.py
diff --git a/API_SERVICE/login_service/routes/__init__.py b/API_SERVICE/batch_service/app/jobs/__init__.py
similarity index 100%
rename from API_SERVICE/login_service/routes/__init__.py
rename to API_SERVICE/batch_service/app/jobs/__init__.py
diff --git a/API_SERVICE/batch_service/app/jobs/els_update.py b/API_SERVICE/batch_service/app/jobs/els_update.py
new file mode 100644
index 00000000..49d941fc
--- /dev/null
+++ b/API_SERVICE/batch_service/app/jobs/els_update.py
@@ -0,0 +1,86 @@
+import logging
+from datetime import datetime, timedelta
+
+from batch_service.app.common.config import settings, base_dir
+from batch_service.app.common.const import BizDataTable, CkanDataTable
+from batch_service.app.common.utils import default_search_set, data_process, index_set
+from batch_service.app.database.conn import db
+
+logger = logging.getLogger()
+
+
+# els update condition
def time_check_cond(query, table_nm, date, op=""):
    """Append a ``modified_dt >= date`` condition to *query*'s where_info.

    Mutates *query* in place and returns it for chaining.
    """
    condition = {
        "table_nm": table_nm,
        "key": "modified_dt",
        "value": date,
        "compare_op": ">=",
        "op": op,
    }
    query["where_info"].append(condition)
    return query
+
+
+# todo: ckan이랑 중복되는 코드 함수로 변경
def insert_meta(retv_update=False):
    """Index biz_meta rows into Elasticsearch.

    Args:
        retv_update: when False, drop and recreate the index and load
            every row; when True, only re-index rows whose ``modified_dt``
            falls within the last 30 minutes.

    Errors are logged (with traceback) instead of raised so a failing
    run does not kill the scheduler.
    """
    index_nm = "biz_meta"
    els_host = settings.ELS_INFO.ELS_HOST
    els_port = settings.ELS_INFO.ELS_PORT
    try:
        if not retv_update:
            # Full rebuild: recreate the index from its mapping file.
            index = index_set(host=els_host, port=els_port)
            index.delete(index_nm)
            index.create(index_nm, path=f"{base_dir}/resources/mapping")

        docmanager = default_search_set(host=els_host, port=els_port, index=index_nm)

        with db.get_db_manager() as session:
            query = BizDataTable.get_select_query("D")
            if retv_update:
                st = datetime.today() - timedelta(minutes=30)
                today = st.strftime("%Y-%m-%d %H:%M:00")
                query = time_check_cond(query, BizDataTable.table_nm, today, "and")
            meta_list = session.query(**query).all()[0]

            logger.info(len(meta_list))

            for meta in meta_list:
                insert_body = data_process(meta)
                docmanager.set_body(insert_body["_source"])
                logger.info(docmanager.insert(insert_body["_id"]))
    except Exception:
        # was print(e) — keep the traceback in the batch log instead.
        logger.exception("insert_meta failed (retv_update=%s)", retv_update)
+
+
def insert_ckan(retv_update=False):
    """Index overseas (CKAN) metadata rows into Elasticsearch.

    Args:
        retv_update: when False, drop and recreate the index and load
            every row; when True, only re-index rows modified in the
            last 30 minutes.

    Errors are logged with traceback instead of raised so the scheduler
    keeps running.
    """
    index_nm = "v_biz_meta_oversea_els"
    els_host = settings.ELS_INFO.ELS_HOST
    els_port = settings.ELS_INFO.ELS_PORT
    try:
        if not retv_update:
            # Full rebuild: recreate the index from its mapping file.
            index = index_set(host=els_host, port=els_port)
            index.delete(index_nm)
            index.create(index_nm, path=f"{base_dir}/resources/mapping")

        docmanager = default_search_set(host=els_host, port=els_port, index=index_nm)

        with db.get_db_manager() as session:
            query = CkanDataTable.get_select_query("")
            if retv_update:
                st = datetime.today() - timedelta(minutes=30)
                today = st.strftime("%Y-%m-%d %H:%M:00")
                query = time_check_cond(query, CkanDataTable.table_nm, today, "and")
                # Drop the pre-built first condition; keep only the time filter.
                query["where_info"] = query["where_info"][1:]
            else:
                query.pop("where_info")
            oversea_list = session.query(**query).all()[0]
            logger.info(len(oversea_list))

            for oversea in oversea_list:
                insert_body = data_process(oversea)
                docmanager.set_body(insert_body["_source"])
                logger.info(docmanager.insert(insert_body["_id"]))
    except Exception:
        # was print(e) — keep the traceback in the batch log instead.
        logger.exception("insert_ckan failed (retv_update=%s)", retv_update)
diff --git a/API_SERVICE/batch_service/app/jobs/recommend_word.py b/API_SERVICE/batch_service/app/jobs/recommend_word.py
new file mode 100644
index 00000000..b260e3c7
--- /dev/null
+++ b/API_SERVICE/batch_service/app/jobs/recommend_word.py
@@ -0,0 +1,44 @@
+import ast
+import logging
+import re
+from collections import Counter
+from datetime import datetime
+
+from batch_service.app.common.config import base_dir
+from batch_service.app.common.const import log_dir, RecommendKeyTable
+from batch_service.app.database.conn import db
+
+logger = logging.getLogger()
+
+
def recommend_search_word():
    """Aggregate today's search-log keywords into the recommend-keyword table.

    Reads the day's search log (one Python-literal list of keywords per
    line), drops typo-like tokens containing bare Korean jamo and words
    on the bad-word list, then upserts the 10 most frequent keywords
    with ``use_yn='N'``.
    """
    with db.get_db_manager() as session:
        # Load today's search keyword log.
        search_file_name = f"{log_dir}/{datetime.today().date().strftime('%Y%m%d')}_search.log"
        with open(search_file_name, "r", encoding="utf-8") as fp:
            search_log_file = fp.read().split("\n")[:-1]

        # Load the filter (bad-word) list.
        # was: {base_dir}/batch_service/common/bad_word.txt — the file is
        # shipped under resources/ (batch_service/resources/bad_word.txt).
        fword_file_name = f"{base_dir}/batch_service/resources/bad_word.txt"
        with open(fword_file_name, "r", encoding="utf-8") as fp:
            bad_word_set = set(fp.read().split("\n"))

        # Drop typo-like tokens containing bare jamo characters.
        today_search_word = []
        for words in search_log_file:
            result = [word for word in ast.literal_eval(words) if re.search("[ㄱ-ㅎㅏ-ㅣ]", word) is None]
            today_search_word = today_search_word + result

        # Filter bad words (set membership is O(1)), then count.
        today_search_word = [word for word in today_search_word if word not in bad_word_set]
        today_search_word = Counter(today_search_word)

        for word, cnt in today_search_word.most_common(10):
            data = {"keyword": word, "count": cnt, "use_yn": "N"}
            if session.query(**RecommendKeyTable.get_select_query(word)).first():
                session.execute(**RecommendKeyTable.get_execute_query("update", data))
            else:
                session.execute(**RecommendKeyTable.get_execute_query("insert", data))
diff --git a/API_SERVICE/batch_service/app/jobs/send_email.py b/API_SERVICE/batch_service/app/jobs/send_email.py
new file mode 100644
index 00000000..43afe43e
--- /dev/null
+++ b/API_SERVICE/batch_service/app/jobs/send_email.py
@@ -0,0 +1,66 @@
+import logging
+import smtplib
+from datetime import datetime, timedelta
+
+from email.mime.text import MIMEText
+from email.mime.multipart import MIMEMultipart
+from batch_service.app.database.conn import db
+from batch_service.app.common.const import EmailSendInfoTable, msg_setting
+from batch_service.app.common.config import settings
+
+logger = logging.getLogger()
+
+
def send_mail():
    """Send queued notification e-mails and mark them as SEND.

    Picks up rows queued in the last minute, renders the HTML template
    for each row's template code, sends it over SMTP (STARTTLS), then
    updates the row's status.
    """
    st_time = datetime.today() - timedelta(minutes=1)
    st_time = st_time.strftime("%Y-%m-%d %H:%M:00")

    with db.get_db_manager() as session:
        email_send_table = EmailSendInfoTable()
        rows = session.query(**email_send_table.get_query_data(st_time)).all()
        rows = rows[0] if rows else []

        for row in rows:

            # SMTP / sender configuration.
            host = settings.SMTP_SERVER
            port = settings.SMTP_PORT
            from_ = settings.EMAIL_ADDR
            password = settings.EMAIL_PASSWORD
            category = msg_setting[row["tmplt_cd"]]

            # Load the HTML template.
            # was '\n'.join(fp.readlines()), which doubled every newline.
            with open(category["tmplt"], "r", encoding="utf-8") as fp:
                html = fp.read()

            # Fill in the placeholders.
            # TODO: extract into a helper once more template codes exist.
            if row["tmplt_cd"] in ["register", "password", "share"]:
                html = html.replace("CONTENTS1", row['contents'])
            else:
                content = row["contents"].split("|")
                html = html.replace("TITLE", row['title'])
                html = html.replace("CONTENTS1", content[0])
                html = html.replace("CONTENTS2", content[1])

            # was: `if category in [...]` — category is a dict, so the
            # comparison was always False and the subject was never
            # formatted.  Also format into a local so the shared
            # msg_setting entry is not mutated between rows.
            subject = category["sub"]
            if row["tmplt_cd"] in ["analysisRequest", "toolApply"]:
                subject = subject.format(row["title"])

            # Build the message.
            message = MIMEMultipart("alternative")
            message["Subject"] = subject
            message["From"] = from_
            message["To"] = row["rcv_adr"]
            html_part = MIMEText(html, "html")
            message.attach(html_part)

            # Send; the context manager closes the connection even on error.
            with smtplib.SMTP(host=host, port=port) as smtp:
                smtp.ehlo()
                smtp.starttls()
                smtp.login(from_, password)
                smtp.send_message(message)

            # Mark the row as sent.
            row["sttus"] = "SEND"
            session.execute(**EmailSendInfoTable.get_execute_query("UPDATE", row))
diff --git a/API_SERVICE/batch_service/app/jobs/seoul_db_upload.py b/API_SERVICE/batch_service/app/jobs/seoul_db_upload.py
new file mode 100644
index 00000000..63b2ce49
--- /dev/null
+++ b/API_SERVICE/batch_service/app/jobs/seoul_db_upload.py
@@ -0,0 +1,42 @@
+import logging
+from datetime import datetime
+
+from batch_service.app.common.const import SeoulDataKor, SeoulDataWorld
+from batch_service.app.database.conn import seoul_db, db
+
+logger = logging.getLogger()
+
+
def insert_ddr(kor_check: bool = True):
    """Sync Seoul-DB dataset rows into the local (katech) DB.

    Args:
        kor_check: True syncs the Korean dataset table, False the
            world dataset table.

    Each source row is updated if its key already exists locally,
    otherwise inserted.  Errors are logged instead of raised.
    """
    data_dict = None  # last row being processed, kept for error context
    try:
        table = SeoulDataKor if kor_check else SeoulDataWorld

        query = table.get_select_query("")
        query.pop("where_info")

        with seoul_db.get_db_manager() as session:
            dataset = session.query(**query).all()[0]

        logger.info(len(dataset))

        with db.get_db_manager() as sess:
            for data_dict in dataset:
                check_query = table.get_select_query(data_dict[table.key_column])
                if sess.query(**check_query).first():
                    logger.info("update")
                    query = table.get_execute_query("update", data_dict)
                else:
                    logger.info("insert")
                    query = table.get_execute_query("insert", data_dict)

                logger.info(sess.execute(**query))

    except Exception:
        # was print(e), and data_dict could be unbound here (NameError
        # would have masked the real failure).
        logger.info(data_dict)
        logger.exception("insert_ddr failed (kor_check=%s)", kor_check)
diff --git a/API_SERVICE/batch_service/app/main.py b/API_SERVICE/batch_service/app/main.py
new file mode 100644
index 00000000..2206a9fa
--- /dev/null
+++ b/API_SERVICE/batch_service/app/main.py
@@ -0,0 +1,58 @@
+import uvicorn
+from apscheduler.schedulers.background import BackgroundScheduler
+from fastapi import FastAPI
+
+from batch_service.app.common.config import settings
+from batch_service.app.database.conn import db #,seoul_db
+from batch_service.app.jobs import send_email, recommend_word, els_update#, seoul_db_upload
+# from batch_service.app.routes.v1 import els_update, seoul_update
+
+"""
+서울대 DB connection Error 발생 이슈로 관련 코드 전부 주석처리
+"""
+
def create_app():
    """Create the FastAPI app and bind the primary DB connector.

    Seoul-DB and router wiring are intentionally commented out (Seoul-DB
    connection errors — see module docstring).
    """
    app_ = FastAPI()
    # was: print(settings.dict()) — removed; it dumped the full settings
    # (including SMTP credentials) to stdout on every start.
    db.init_app(app_, **settings.dict())

    # seoul_db.init_app(app_, DB_INFO=settings.SEOUL_DB_INFO.dict(by_alias=True), **settings.dict(exclude={"DB_INFO"}))

    # app_.include_router(els_update.router, prefix="/portal/api/batch")

    return app_
+
+
app = create_app()

# TODO: created ad hoc for now — consider managing the scheduler in an
# object and injecting it, instead of a module-level global.
scheduler = BackgroundScheduler()
+
+
@app.on_event("startup")
def _app_startup():
    """Register the batch jobs and start the scheduler."""
    # (callable, args, cron fields, job id)
    job_specs = [
        (send_email.send_mail, None, {"second": "*/5"}, "email"),
        (recommend_word.recommend_search_word, None, {"hour": "23", "minute": "59"}, "recommend"),
        # Nightly full Elasticsearch rebuilds ...
        (els_update.insert_meta, [False], {"hour": "00", "minute": "15"}, "update_meta"),
        (els_update.insert_ckan, [False], {"hour": "00", "minute": "40"}, "update_ckan"),
        # ... and incremental refreshes every five minutes.
        (els_update.insert_meta, [True], {"minute": "*/5"}, "update_meta_retv"),
        (els_update.insert_ckan, [True], {"minute": "*/5"}, "update_ckan_retv"),
    ]
    for func, args, fields, job_id in job_specs:
        scheduler.add_job(func, "cron", args=args, id=job_id, **fields)

    # Re-enable once the Seoul-DB data has been verified:
    # scheduler.add_job(seoul_db_upload.update_ddr, "cron", hour="23", minute="59", id="update_ddr")
    # scheduler.add_job(seoul_db_upload.update_rr, "cron", hour="23", minute="59", id="update_rr")

    scheduler.start()
+
+
@app.on_event("shutdown")
def _app_shutdown():
    # Stop the APScheduler thread so the process can exit cleanly.
    scheduler.shutdown()
+
+
if __name__ == "__main__":
    # Dev entry point; production runs under gunicorn (see gunicorn.conf.py).
    uvicorn.run("main:app", host="0.0.0.0", port=8090, reload=True)
diff --git a/API_SERVICE/login_service/routes/v1/__init__.py b/API_SERVICE/batch_service/app/routes/__init__.py
similarity index 100%
rename from API_SERVICE/login_service/routes/v1/__init__.py
rename to API_SERVICE/batch_service/app/routes/__init__.py
diff --git a/API_SERVICE/meta_service/ELKSearch/Utils/__init__.py b/API_SERVICE/batch_service/app/routes/v1/__init__.py
similarity index 100%
rename from API_SERVICE/meta_service/ELKSearch/Utils/__init__.py
rename to API_SERVICE/batch_service/app/routes/v1/__init__.py
diff --git a/API_SERVICE/batch_service/app/routes/v1/els_update.py b/API_SERVICE/batch_service/app/routes/v1/els_update.py
new file mode 100644
index 00000000..7b1c439b
--- /dev/null
+++ b/API_SERVICE/batch_service/app/routes/v1/els_update.py
@@ -0,0 +1,70 @@
+from datetime import datetime
+
+from fastapi.logger import logger
+from fastapi import APIRouter, Depends
+
+from batch_service.app.common.const import BizDataTable, CkanDataTable, SeoulDataKor, SeoulDataWorld
+from batch_service.app.common.utils import default_search_set, data_process, index_set
+from batch_service.app.database.conn import db, seoul_db
+
+from libs.database.orm import Executor
+
+router = APIRouter()
+
+
@router.get("/update_meta_els")
async def meta_test(session: Executor = Depends(db.get_db)):
    """Manually re-index biz_meta rows into Elasticsearch.

    Filters on ``modified_dt >=`` the current minute; manual-trigger
    counterpart of the scheduled job in jobs/els_update.py.
    """
    index_nm = "biz_meta"
    # TODO: read host/port from settings.ELS_INFO like the batch job does.
    els_host = "10.10.10.62"
    els_port = "39200"
    try:
        docmanager = default_search_set(host=els_host, port=els_port, index=index_nm)

        query = BizDataTable.get_select_query("D")
        today = datetime.today().strftime("%Y-%m-%d %H:%M:00")
        query = time_check_cond(query, BizDataTable.table_nm, today, "and")
        meta_list = session.query(**query).all()[0]
        logger.info(len(meta_list))

        for meta in meta_list:
            insert_body = data_process(meta)
            docmanager.set_body(insert_body["_source"])
            logger.info(docmanager.insert(insert_body["_id"]))
    except Exception:
        # was print(e) — keep the traceback in the server log instead.
        logger.exception("update_meta_els failed")
+
+
@router.get("/update_oversea_els")
async def oversea_test(session: Executor = Depends(db.get_db)):
    """Manually re-index every overseas (CKAN) metadata row into ES."""
    index_nm = "v_biz_meta_oversea_els"
    # TODO: read host/port from settings.ELS_INFO like the batch job does.
    els_host = "10.10.10.62"
    els_port = "39200"
    try:
        # (dropped an unused `index = index_set(...)` connection)
        docmanager = default_search_set(host=els_host, port=els_port, index=index_nm)

        query = CkanDataTable.get_select_query("")
        query.pop("where_info")
        oversea_list = session.query(**query).all()[0]
        logger.info(len(oversea_list))

        for oversea in oversea_list:
            insert_body = data_process(oversea)
            docmanager.set_body(insert_body["_source"])
            logger.info(docmanager.insert(insert_body["_id"]))
    except Exception:
        # was print(e) — keep the traceback in the server log instead.
        logger.exception("update_oversea_els failed")
+
+
+# els update condition
def time_check_cond(query, table_nm, date, op=""):
    """Add a ``modified_dt >= date`` filter to *query* in place; return *query*."""
    clause = dict(
        table_nm=table_nm,
        key="modified_dt",
        value=date,
        compare_op=">=",
        op=op,
    )
    query["where_info"].append(clause)
    return query
diff --git a/API_SERVICE/batch_service/app/routes/v1/seoul_update.py b/API_SERVICE/batch_service/app/routes/v1/seoul_update.py
new file mode 100644
index 00000000..0fc57918
--- /dev/null
+++ b/API_SERVICE/batch_service/app/routes/v1/seoul_update.py
@@ -0,0 +1,47 @@
+from datetime import datetime
+
+from fastapi.logger import logger
+from fastapi import APIRouter, Depends
+
+from batch_service.app.common.const import BizDataTable, CkanDataTable, SeoulDataKor, SeoulDataWorld
+from batch_service.app.database.conn import db, seoul_db
+
+from libs.database.orm import Executor
+
+router = APIRouter()
+
+
@router.post("/update_seoul_db")
async def seoul_test(kor_check: bool = True, session: Executor = Depends(db.get_db)):
    """Sync Seoul-DB dataset rows into the local (katech) DB.

    Args:
        kor_check: True syncs the Korean dataset table, False the world one.

    Each source row is updated if its key already exists locally,
    otherwise inserted.  Errors are logged instead of raised.
    """
    st = datetime.now()
    try:
        table = SeoulDataKor if kor_check else SeoulDataWorld

        query = table.get_select_query("")
        query.pop("where_info")
        with seoul_db.get_db_manager() as sess:
            dataset = sess.query(**query).all()[0]
        logger.info(len(dataset))

        for data_dict in dataset:
            check_query = table.get_select_query(data_dict[table.key_column])
            if session.query(**check_query).first():
                query = table.get_execute_query("update", data_dict)
            else:
                query = table.get_execute_query("insert", data_dict)
            logger.info(data_dict["ds_id"])

            session.execute(**query)
        et = datetime.now()
        logger.info(len(dataset))
        logger.info(et - st)

    except Exception:
        # was print(e) — keep the traceback in the server log instead.
        logger.exception("update_seoul_db failed (kor_check=%s)", kor_check)
\ No newline at end of file
diff --git a/API_SERVICE/batch_service/app/routes/v1/temp.py b/API_SERVICE/batch_service/app/routes/v1/temp.py
new file mode 100644
index 00000000..757bbdab
--- /dev/null
+++ b/API_SERVICE/batch_service/app/routes/v1/temp.py
@@ -0,0 +1,10 @@
+from fastapi import APIRouter
+from starlette.responses import JSONResponse
+
+
+router = APIRouter()
+
+
@router.get("/")
async def healthcheck():
    """Liveness probe: always answers 200 with body "ok"."""
    response = JSONResponse(status_code=200, content="ok")
    return response
diff --git a/API_ROUTER/gunicorn.conf.py b/API_SERVICE/batch_service/gunicorn.conf.py
similarity index 95%
rename from API_ROUTER/gunicorn.conf.py
rename to API_SERVICE/batch_service/gunicorn.conf.py
index 3b1d2cdc..500bb22e 100644
--- a/API_ROUTER/gunicorn.conf.py
+++ b/API_SERVICE/batch_service/gunicorn.conf.py
@@ -17,7 +17,7 @@
# range.
#
-bind = "0.0.0.0:8010"
+bind = "0.0.0.0:8000"
backlog = 2048
#
@@ -67,8 +67,8 @@
#
# This setting is intended for development. It will cause
# workers to be restarted whenever application code changes.
-workers = 3
-threads = 3
+workers = 1
+threads = 1
worker_class = "uvicorn.workers.UvicornWorker"
worker_connections = 1000
timeout = 60
@@ -127,7 +127,7 @@
#
daemon = False
-pidfile = "./gunicorn-router.pid"
+pidfile = "./gunicorn-batch.pid"
umask = 0
user = None
group = None
@@ -144,10 +144,10 @@
#
# A string of "debug", "info", "warning", "error", "critical"
#
-logfile = "./router.log"
-errorlog = "./router-error.log"
+logfile = "./log/gunicorn-batch.log"
+errorlog = "./log/gunicorn-batch-error.log"
loglevel = "info"
-accesslog = "./router.log"
+accesslog = "./log/gunicorn-batch.log"
access_log_format = '%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s"'
#
@@ -163,7 +163,7 @@
# A string or None to choose a default of something like 'gunicorn'.
#
-proc_name = "API-Router"
+proc_name = "API-Service-batch"
#
@@ -219,4 +219,4 @@ def worker_int(worker):
def worker_abort(worker):
- worker.log.info("worker received SIGABRT signal")
\ No newline at end of file
+ worker.log.info("worker received SIGABRT signal")
diff --git a/API_SERVICE/batch_service/gunicorn.sh b/API_SERVICE/batch_service/gunicorn.sh
new file mode 100755
index 00000000..9c53a692
--- /dev/null
+++ b/API_SERVICE/batch_service/gunicorn.sh
@@ -0,0 +1,62 @@
+#!/bin/bash
+
+root_path="$( cd "$( dirname "$0" )" && pwd -P )"
+pid_path="$root_path/gunicorn-batch.pid"
+
+echo $pid_path
+
# Start gunicorn as a daemon and report its PID.
start_gunicorn() {
    # Quote $pid_path so a space in the checkout path cannot break the calls.
    gunicorn app.main:app --bind 0.0.0.0:23000 -c gunicorn.conf.py -D --pid "$pid_path"
    sleep 2
    pid=$(cat "$pid_path")
    echo "Gunicorn started. PID: $pid"
}
+
# Stop gunicorn using the recorded PID file.
stop_gunicorn() {
    # Quote $pid_path so a space in the checkout path cannot break the calls.
    if [ -f "$pid_path" ]; then
        pid=$(cat "$pid_path")
        kill "$pid"
        rm "$pid_path"
        echo "Gunicorn stopped. PID: $pid"
    else
        echo "Gunicorn is not running."
    fi
}
+
# Restart gunicorn (stop, then start).
restart_gunicorn() {
    stop_gunicorn
    start_gunicorn
}
+
# Report whether gunicorn is running.
status_gunicorn() {
    if [ -f "$pid_path" ]; then
        pid=$(cat "$pid_path")
        # kill -0 probes the process without signalling it, so a stale
        # PID file left by a crash is no longer reported as "running".
        if kill -0 "$pid" 2>/dev/null; then
            echo "Gunicorn is running. PID: $pid"
        else
            echo "Gunicorn is not running. (stale PID file: $pid_path)"
        fi
    else
        echo "Gunicorn is not running."
    fi
}
+
# Dispatch on the first CLI argument.
case "$1" in
    start)
        start_gunicorn
        ;;
    stop)
        stop_gunicorn
        ;;
    restart)
        restart_gunicorn
        ;;
    status)
        status_gunicorn
        ;;
    *)
        echo "Usage: $0 {start|stop|restart|status}"
        exit 1
        ;;
esac
\ No newline at end of file
diff --git a/API_SERVICE/batch_service/requirements.txt b/API_SERVICE/batch_service/requirements.txt
new file mode 100644
index 00000000..b76eb6f1
--- /dev/null
+++ b/API_SERVICE/batch_service/requirements.txt
@@ -0,0 +1,56 @@
+aiohttp==3.8.4
+aiosignal==1.3.1
+anyio==3.6.2
+APScheduler==3.10.1
+async-timeout==4.0.2
+attrs==23.1.0
+bcrypt==4.0.1
+boto3==1.28.67
+botocore==1.31.67
+certifi==2022.12.7
+cffi==1.15.1
+charset-normalizer==3.1.0
+click==8.1.3
+cryptography==40.0.2
+elastic-transport==8.4.0
+elasticsearch==8.7.0
+exceptiongroup==1.1.2
+fastapi==0.95.1
+frozenlist==1.3.3
+greenlet==2.0.2
+gunicorn==20.1.0
+h11==0.14.0
+httpcore==0.17.3
+httpx==0.24.1
+idna==3.4
+iniconfig==2.0.0
+jmespath==1.0.1
+multidict==6.0.4
+numpy==1.26.0
+packaging==23.1
+pandas==2.1.0
+passlib==1.7.4
+Pillow==10.0.1
+pluggy==1.2.0
+psycopg2-binary==2.9.9
+pycparser==2.21
+pycryptodome==3.18.0
+pydantic==1.10.7
+PyJWT==2.7.0
+pytest==7.4.0
+python-dateutil==2.8.2
+python-dotenv==1.0.0
+pytz==2023.3
+requests==2.31.0
+s3transfer==0.7.0
+six==1.16.0
+sniffio==1.3.0
+SQLAlchemy==2.0.22
+starlette==0.26.1
+tomli==2.0.1
+typing_extensions==4.5.0
+tzdata==2023.3
+tzlocal==5.0.1
+urllib3==1.26.15
+uvicorn==0.21.1
+yarl==1.8.2
diff --git a/API_SERVICE/batch_service/resources/bad_word.txt b/API_SERVICE/batch_service/resources/bad_word.txt
new file mode 100644
index 00000000..8ff9da46
--- /dev/null
+++ b/API_SERVICE/batch_service/resources/bad_word.txt
@@ -0,0 +1,1532 @@
+넌씨눈
+개새끼
+따먹었어
+ㄷㅇㅂ
+시펄
+빠구울
+쓰레기 새끼
+된장녀
+지껄이
+브랄
+십팔넘
+개씁년
+썅놈
+병크
+씨블
+졸좋
+새뀌
+찌랄
+애미랄
+니씨브랄
+죽어버려
+개너미
+zaji
+줘패
+버지뜨더
+쫀 맛
+창넘
+늬미
+개지랄
+니미기
+쇡끼
+닝기리
+ㅈ.ㄴ
+버지물마셔
+자지넣자
+보지자지
+bozi
+인간말종
+존잼
+씌벨
+존똑
+조오웃
+개쓰레기
+씨이붕
+샹년
+대갈
+십8
+똥구녁
+me췬
+쳐쑤셔박어
+슈우벌
+씨새발끼
+개후라들놈
+좆만한놈
+호냥년
+18ㅅㅔ키
+뒤졌
+개똥
+로 꺼.져
+띠불
+도랏
+성폭행
+ㅈ같네
+젖같
+족까
+젖까
+이기야
+씹새
+우미쑤셔
+조온만
+씨섹끼
+미핀놈
+한녀
+미튄
+똥
+씨벨
+싸가지없
+시팔년
+왕버지
+빠가니
+ㅈ1랄
+허졉
+족까내
+게에가튼
+샛기
+띠블넘
+누나강간
+씨바알
+개같
+존나아
+노무노무
+미친구멍
+그1켬
+뻐큐
+개샛기
+뼝신
+좋오웃
+씹창
+씨입새에
+허접
+G랄
+촌씨브라리
+개소리
+보지따먹기
+빨치산
+ㅄ
+시이붕
+보지녀
+허벌자식
+샊히
+씹탱
+슈1발
+너거애비
+좃넘
+조오지
+씨블년
+새키
+사까시
+걸레년
+애애무
+엠븽신
+좃대가리
+병신세리
+십지랄
+쳐-
+애미
+sex해
+ㅎㅃ
+시바앙
+빠가새
+내조지
+병신
+십자슥
+공지
+뒤지길
+18세ㅋㅣ
+똘아이
+shit
+닳은년
+젓떠
+존쎼
+빻은
+눈깔파
+오랄
+졏같
+졸귀
+존나게
+싸물어
+처먹
+벌창
+사까쉬
+십자석
+니뿡
+이새끼
+보라니
+손놈
+뒤져요
+좃까리
+짱개
+후1빨
+ㅅㄲ들
+정액마셔
+에미
+시이풀
+쉬불
+씨뻘
+조개따조?
+ㅂ크
+웅앵
+내버지
+십부랄
+로린
+개에걸래
+유우우방것
+새킈
+니미럴
+꼴랑
+버지쑤셔
+쉰내
+보짓물
+쌍눔
+지1뢰
+애무
+씨비
+쓰바
+시밸
+돌앗구만
+쓰래기같
+개쉐뀌
+돈년
+존트
+쓰발
+새끼라
+새1끼
+불알
+니믜
+존귘
+빠구리
+처먹고
+성괴
+친 ㅅㄲ
+씨입새
+클리토리스
+친 놈
+느금마
+시방색희
+레1친
+바주카자지
+개쩌
+개.웃
+보전깨
+보지벌리
+쌍놈
+좃만이
+빠라
+미치ㄴ
+럼들
+족같내
+존쎄
+ㅅㅐㄲㅣ
+십팔새끼
+띠이발
+아오 ㅅㅂ
+개같이
+꼴보기
+뒤져야
+꺼져요
+거지같은
+색희
+좇
+지뢀
+새끼
+미친~
+은년
+보지뚫어
+씨댕
+ㅈ리
+뒈져
+조온나
+씹덕
+젓물냄새
+망해라
+성교
+버어어지이
+미: 놈
+삼일한
+ㅈㄴ
+똥꾸뇽
+노네들
+가슴주물럭
+개거얼레
+존예
+엿이나
+쎄리
+존내
+좃빠라라
+남미새
+친 년
+뒤질
+귀두
+헐렁보지
+돌앗나
+개독
+좁밥
+난자마셔
+창놈
+꺼지세요
+착짱죽짱
+유방쪼물딱
+달달이
+세키
+보지보지
+유방주물럭
+좃간년
+봉알
+가슴빨아
+보지빨어
+덜은새끼
+십세
+ㅆㅣ바
+미놈
+돈새끼
+시이팔
+딴년
+bitch
+씨부렬
+18num
+로꺼져
+섬숭이
+보지벌려
+잡것
+젖 같
+호모
+후장꽂아
+닥치세
+시벌
+노무현
+애미보지
+애미자지
+ㅅ1발
+애에미
+보지정액
+염뵹
+닥1
+돌은넘
+ㅆㅣㅂㅏ
+설거지론
+쌔리
+엠창
+붕신
+자지구멍
+지뢰
+절라
+좋만
+ㅅ.ㅂ
+퐁퐁남
+쎄끼
+시입세에
+쉬버
+내꺼핧아
+극혐
+18놈
+시이펄
+ㄱㅐㅅㅐㄲl
+에에무
+허벌보지
+보적보
+시부럴
+상폐녀
+쓉새
+십탱구리
+쉬빡
+후우자앙
+조센징
+쉬이이
+혀로보지핧기
+씹쌔
+지1랄
+버지뚫어
+또라인
+니애뷔
+스벌
+개좆
+쌍년
+젓물
+나쁜새끼
+씹할
+시팔넘
+뒤진다
+한 년
+여자따묵기
+자기핧아
+ㅈ같
+사새끼
+지이랄
+덬
+적까
+개붕알
+개자지
+쉬붕
+시-발
+ㅆ1ㄺ
+죵나
+좆년
+개애거얼래
+씨팍
+친노마
+후려
+허덥
+엠-창
+개떡
+가슴핧아
+십셰리
+구씹
+씹자지
+곱창났
+빠네
+디졌
+D쥐고
+씨바
+뚫린입
+조가튼
+걸레보지
+쪽1바리
+병-신
+병딱
+시이불
+따먹자
+ㅌㅓㄹㅐㄱㅣ
+보지털
+막간년
+개씨발
+실프
+좃만한것
+십세리
+미친쉐이
+띠이이벌
+또오라아이
+개늠
+니뽕
+쓰레기새
+보지뜨더
+찍찍이
+씨불알
+쌍쌍보지
+젓까
+뻑유
+싑창
+씨밸
+ㅉ질한
+시팔놈
+취좃
+조오온니
+강간
+미친씨부랄
+유방쪼물럭
+새ㄲㅣ
+잠지물마셔
+빙신
+걔잡지랄
+좀마니
+미:놈
+괘새끼
+돌은새끼
+조개마셔줘
+암캐년
+괴에가튼?
+뚝배기
+색퀴
+좆새끼
+쉬이붕
+허젚
+조개벌려조
+뒷치기
+빠가냐
+운디네
+쪼녜
+자지
+존싫
+쉬박
+병맛
+시새발끼
+쌕스
+쥰트
+조개핧아줘?
+씹미랄
+후라덜
+조까
+시탱
+엠빙신
+어미강간
+시빡
+꼴값
+십탱굴이
+슈ㅣ발
+ㅆㅂ
+친놈
+졸웃
+좆만아
+십새
+걔섀
+호로자
+씨버럼
+어미쑤시자
+쒸8
+18ㅅㅔㅋㅣ
+젓밥
+호로자슥
+좃물
+여어엄
+버따리자지
+늬믜
+펨코
+촌씨브랭이
+이따위
+느그매
+머리텅
+d쥐고
+보지털뽑아
+세엑스
+젖같은
+게지랄놈
+새1키
+뻨큐
+삐걱
+🚬
+개놈
+왕털보지
+썌끼
+입 털
+쿰.척
+시발새끼
+색끼
+년놈
+영자
+늑음
+미시친발
+개걸레
+가슴쪼물딱
+등신
+써글년
+씨뎅
+맛이간년
+쌍넘
+씨입세에
+애에비
+좃도
+버어지
+개넷
+시입세
+좃까
+개젓가튼넘
+미친쇄리
+주글년
+조개보지
+죽여불고
+개후라새끼
+죶
+씹물
+개간
+씹쌔끼
+항문쑤셔
+조개쑤셔줘
+쓰파
+섹쓰
+막대쑤셔줘?
+씨벌년
+개 새끼
+ㅈㅏ위
+띠벌
+쉬밸년
+보지머리박기
+에에미
+존ㄴ나
+퐁퐁녀
+또-라-이
+죤내
+정신나갓
+시이벌
+허버리년
+드응신
+빠아구우리
+쉬팔
+쉬이팔
+jonna
+게이
+시불
+버지벌료
+노알라
+상년
+좆나
+잡년
+따아알따아리
+슈벌
+뇌1텅
+새.끼
+그켬
+졸잼
+맛간년
+보슬아치
+개아기
+보지구녕
+거지같
+빠간가
+트랜스젠더
+대에가리
+글러먹
+첫빠
+빙신쉐이
+게젓
+쓰1레기
+씝창
+시팔
+좆빨아
+닥-쳐
+듣보
+떠라이
+me친
+씨부럴
+ㅅ1ㄲ
+18세키
+시팔새끼
+존니
+십부럴
+잠지뚫어
+ㄱㅅㄲ
+흐젚
+버어지이
+같은 새끼
+씹선
+믜친
+좆까
+씨박색히
+ㅆㅂㄹㅁ
+스ㄹㅜ
+애미잡년
+미친개
+졀리
+싸가지 없
+찌질
+병1신
+썅늠
+항문
+시방쉑희
+개떵
+jaji
+존낙
+난자먹어
+개애걸래
+흐접
+좆같은새끼
+존버
+미치인
+보지핧아줄까
+외1퀴
+슨상님
+보징어
+공지사항
+띠블
+자지빨아
+허벌자지
+쥰나
+보지쥐어짜
+레친
+미친놈
+에무
+자지털
+버어어지
+수셔
+먹.끔
+에라이 퉤
+레기같
+유두빨어
+아아가리
+개씨블
+다꺼져
+쳐받는
+따알따리
+허어벌
+이그니스
+유우까압
+쉬이풀
+대애가리
+꼬추
+자지정개
+개작두년
+쫂
+조오우까튼
+미틴놈
+개씨발넘
+개씁자지
+도라이
+D지고
+버지따먹기
+쉑갸
+자지핧아줘
+쪽본
+조-ㅈ
+쿰척
+조오까튼
+18새끼
+미티넘
+봊
+씹새끼
+개에거얼래
+젼나
+pennis
+쳐발라
+보지핧아줘
+십창녀
+여엄병
+좆까라
+좃마무리
+18ㅅㅐㄲㅣ
+스.루
+옘병
+페니스
+미틴년
+엠플레버
+미틴넘
+자박꼼
+시미발친
+호로잡
+막대쑤셔줘
+자지꽂아
+띠발뇬
+뻑큐
+쉽세
+주둥이
+에라이퉷
+jot같
+여미새
+d져
+고환
+내꺼빨아
+버짓물
+개부달
+걔잡년
+미친색
+창녀버지
+좆도
+졸귘
+지랄
+병닥
+젖탱이
+ㅆ1ㅂ
+좃물냄새
+사까시이
+씨빠빠
+까내리
+정액먹어
+조개넓은년
+엠생
+버지벌려
+섹스하자
+병신셰리
+띠부우울
+씨박색희
+자지뜨더
+젓냄새
+씨이벌
+음경
+개후라년
+뇌 텅
+조옴마니
+염병
+앙기모띠
+개색뀌
+씨팍세끼
+어미따먹자
+기자레기
+자압것
+씹탱이
+씨발
+찌질이
+젖밥
+눈나
+젼낰
+십쉐끼
+젓마무리
+개에가튼
+엿먹어라
+그나물에
+미쳤니
+ㅆㅣ발
+개자식
+ㅆㅣ댕
+찎찎이
+씹자슥
+소음순
+지롤
+시바알
+씨입
+ㅁ친
+개지랄놈
+쉬펄
+씨뷰렬
+니애비
+내미럴
+ㅁㅣ췬
+penis
+김치녀
+ㅅㅡ루
+친년
+ㅂㅊ
+닥2
+빠큐
+보지에자지너
+씨걸
+왕털잠지
+정자핧아
+호로
+돌았네
+띠벨
+졸맛
+띠이벌
+조낸
+ㅆㅂㄻ
+잠짓물마셔
+쌔끼
+개저가튼
+졸멋
+씨벌쉐이
+씨퐁뇬
+개념빠가
+띠빌
+빠굴
+따먹는다
+맘충
+젓만이
+서버
+쉬방
+씌댕
+돌았구만
+시벌탱
+왕털버지
+롬들
+파친
+븅신
+및힌
+그지 같
+존잘
+보지틀래기
+씨빨
+씹년
+개작두넘
+개나대
+뽀지
+쥰니
+보지물
+조개속물
+조개핧아줘
+애미좃물
+드으응신
+부왘
+내자지
+펑글
+유방핧어
+졀라
+잠지털
+후장뚫어
+좀쓰레기
+야dong
+섀키
+앰창
+걸-레
+fuck
+흐졉
+가슴쪼물락
+게세끼
+쓰바새끼
+ㅆㅣ8
+취ㅈ
+씨퐁자지
+곱창나
+자지구녕
+개새기
+ㅆㄺ
+새꺄
+씹세
+졸예
+꼭지
+조온니
+디져라
+띠이버얼
+씨븡
+큰보지
+개잡년
+쓰벌
+망돌
+그지같
+버지냄새
+젓가튼
+18년
+지이라알
+왜저럼
+쉐끼
+존1
+꼴깝
+가슴조물락
+개가튼뇬
+개저엇
+양아치
+조오또
+먹.금
+개넘
+돌으년
+외퀴
+니할애비
+빠가씹새
+괴가튼
+씨팔
+존나
+명존
+이 새끼
+먹끔
+엠뷩신
+조옷만
+쓰댕
+개가튼
+호로자식
+ㅇㅍㅊㅌ
+따아알따리
+보지찌져
+덜떨어
+십녀
+씨이팔
+뒷잇치기
+d지고
+띠이바알
+계새끼
+ㅅ.ㄲ
+잡놈
+더어엉신
+젗같
+씹보지
+개부랄
+조온
+버지털
+자지쓰레기
+yadong
+꼴갑
+wlfkf
+뚫린 입
+씹지랄
+조온마니
+뇌텅
+개고치
+더럽네
+시발년
+nflavor
+왕자지
+띠브울
+좃가튼뇬
+개라슥
+염병할
+뒤치기
+여자ㄸㅏ묵기
+존 나
+니아비
+씹브랄
+기레기
+18nom
+따먹을까
+구1씹
+오크
+배빵
+김대중
+버지썰어
+조우까튼
+개같은년
+색갸
+정자마셔
+화낭년
+발놈
+쥰내
+시박색히
+가슴만져
+뒤져라
+니아범
+보지핧아
+자지핧아
+닝기미
+시발
+조개벌려조?
+개가튼년
+쥐랄
+죠낸
+세끼
+시이발
+졏 같
+쉬팍
+쉬이발
+운영자
+소추
+보지벌리자
+조개마셔줘?
+mi친
+쉬이벌
+개보지년
+쪽바리
+강간한다
+dogbaby
+미쳣네
+ㅂㅅ
+죽여 버리고
+느금
+헤으응
+유방만져
+띠팔
+띠바
+요년
+염-병
+보지핧어
+촌씨브랑이
+굿보지
+도른
+미칭럼
+시1발
+존귀
+씨퐁
+유두핧어
+흉자
+따먹어야지
+대-가-리
+쪼다
+좃보지
+씌발
+뻐규
+좃냄새
+노친네
+씨바라
+미쳤나
+껒여
+미칀
+씹쉐뀌
+허벌년
+믜칀
+쇅끼
+쉬이이이
+씨1발
+시바시바
+쌍보지
+돌앗네
+동생강간
+쉬빨
+pussy
+개마이
+개셈
+개년
+좀물
+머리 텅
+맛없는년
+동성애자
+십탱
+좆
+색키
+새77ㅣ
+씹탱굴이
+보지구멍
+뷰웅신
+쒸발
+정액발사
+쎅쓰
+보지털어
+유방핧아
+정액짜
+가슴빨어
+개젓
+씨벌
+ㅅ발
+ㅅ루
+조깟
+쉽알넘
+짬지
+텐덕
+십팔
+씨8
+니년
+개잡지랄
+보지
+미1친
+사까아시
+씨팍새끼
+닥전
+보쥐
+젓대가리
+쪼다새끼
+ㅂㄹ
+세엑쓰
+씨이발
+씨펄
+게부럴
+병신씨발
+보픈카
+씹팔넘
+미친구녕
+쯰질
+허좁
+미-친
+친구년
+쬰잘
+쬲
+띠부울
+씨븡새끼
+뼈큐
+닥쳐라
+좆만한새끼
+뇌-텅텅
+좃빠구리
+후라덜넘
+보지찢어
+씨댕이
+썅
+십세이
+미치누
+레기네
+존좋
+개걸래
+벌창같은년
+쪼까튼
+별창
+쒸댕
+조개쑤셔줘?
+좆물
+찌1질
+종나
+거시기
+좋만한것
+빻았
+섹끼
+유방
+jazi
+지-랄
+먹1금
+싹스
+자지빨어
+시바
+눈새
+씨발년
+지럴
+줬같은
+친ㅅㄲ
+씨ㅂㅏ
+노옴
+싸개
+좆먹어
+sibal
+따먹어
+니미
+가슴핧어
+좆밥
+조오올라
+씹창녀
+젓같내
+조녜
+쉑쓰
+씌팔
+ㅅㄲ네
+로 꺼져
+쓰브랄쉽세
+mi쳤
+졸싫
+씹못
+쓰벨
+등-신
+펨베
+짱깨
+쌍-판
+🖕
+뇌피셜
+존마니
+좃만아
+쎄엑스
+나빼썅
+와꾸
+십떼끼
+게새끼
+닥후
+시미친발
+ㅆㄹㄱ
+시박쉑히
+좃깟네
+씨발병신
+찝째끼
+시파
+핑끄
+자지핧어
+개보지
+ya동
+호로짜식
+띠이이발
+씨불
+버짓물마셔
+뽄새
+ㅁㅊ
+꺼.지
+디지고
+빠굴이
+슈발
+씹
+대가리
+엿같
+개-새-끼
+boji
+게늠
+졌같은
+좆같은놈
+개후라
+후장뚫어18세키
+띠펄
+십쉐
+엔플레버
+좃빠네
+버지빨어
+조오가튼
+색히
+쉬탱
+머갈
+미친ㅋ
+존.나
+쌕쓰
+개씨발자슥
+붕알
+한년
+凸
+부랄
+섹스해
+에애무
+쳐먹
+닥쳐
+누보햄
+점물
+씨팍넘
+조개따조
+뒤지겠
+좃털
+게저엇
+쓰루
+뽄세
+ㅅㅍ
+죽여뿌고
+ㅅ끼
+ㅉ
+씹버지
+따먹었지
+게자식
+골빈
+써글
+핑프
+씨뱅가리
+쉬발
+또라이
+좃만한쉐이
+쎅스
+ㅅㅋ
+쳐마
+미친넘
+잠지
+새끼야
+똥구뇽
+ㅂㅁㄱ
+쉬이바
+니애미
+후1려
+아닥
+시키가
+유깝
+에에비
+대음순
+찌질한
+븅쉰
+같은새끼
+사까아시이
+보짓물마셔
+김여사
+조또
+항문수셔
+젓나
+시친발미
+씨발롬
+노무
+ㅎㅌㅊ
+씌뎅
+씹뻐럴
+쒸펄
+정액핧아
+앰
+슈레기
+자지빨아줘
+딸달이
+sex
+시팍
+버지구멍
+fuckyou
+ㅇㅒ쁜
+이년
+빠아가
+먹금
+씹부랄
+존1나
+미친 새
+유우방
+화냥년
+걸래년
+빡새끼
+아오 시바
+sex하자
+쉬방새
+씨빡
+쪽발
+딸딸이
+에비
+미친
+떠어라이
+성교해
+저년
+개지랄넘
+죠온나
+여자ㄸㅏ먹기
+호좁
+씹빵구
+방점뱅
+존맛
+처먹을
+시발놈
+빙띤
+자지쑤셔
+지랼
+유방빨아
+좁빠라라
+왕잠지
+섹스
+씨파넘
+띠발
+씨볼탱
+짱꼴라
+자지박어
+창녀
+니아범?
+보지빨아
+싸가지
+주길년
+유발조물락
+tlqkf
+젓가튼쉐이
+창년벼지
+미틴것
+시팍새끼
+시바라지
+ㅈㄹ
+버지핧아
+미췬
+짱골라
+미친년
+애자
+후장
+존웃
+뷰웅시인
+저엊
+쉬벌
+개저씨
+달딸이
+샊기
+쫀귀
+젓같은
+쫀맛
+ㅆㅣ
+성교하자
+골1빈
+벵신
+씹팔
+빠가야로
+글러 먹
+십창
+씨이불
+눈깔 파
+니기미
+뽕알
+후.려
+시빨
+ㅆㅣ팍넘
+십버지
+창년
+오르가즘
+붜지
+빠아아라
+쉬이이이이
+애미씨뱅
+미친새
+저엇
+ㄱㅐㅈㅏ
+미틴
+씨방세
+엑윽
+썅년
+개련
+짱께
+색스
+육갑
+걸레같은년
+떠어라아이
+여자따먹기
+후우장
+창남
+시댕이
+엄창
+18ㅅㅐ끼
+미친새끼
+정신나갔
+씨부랄
+샤발
+죽여버리고
+씨벌탱
+쉬이펄
+시뷰렬
+좇같
+시볼탱
+은새끼
+쉬이불
+나쁜 새끼
+쉽쌔
+개새
+닌기미
+씨입세
+미친쇠리
+돌았나
+런년
+즤랄
+아가리
+내미랄
+빠아구리
+씨가랭넘
+도랐
+씨가랭년
+자위
+입털
+쫓같
+멜리스
+존멋
+보지박어
+좃부랄
+ㅅㅌㅊ
+쌔엑스
+시바류
+허벌
+쉬이방
+썅뇬
+작은보지
+터래기터래기
+뒤이치기
+자지뜯어
+뒤져야지
+애에무
+왕털자지
+쒸팔
+디질
+조올라
+정자먹어
+섹히
+보지물마셔
+버지빨아
+시뷰럴
+느그
+시부울
+쓰뎅
+me틴
+개불랄
+뇬
+개거얼래
+죤나
+풀발
+씨가랭놈
+쉬풀
+씨붕
+zazi
+씹치
+마스터
+좃또
+에라이 퉷
+난자핧아
+ㅅ1ㅂ
+호졉
+빠가십새
+따먹기
+니미랄
+뷩딱
+미친눔
+쉬이빨
+퍄퍄
+꽃휴
+쳐먹고
+뒤지고싶
+걸레핀년
+또오라이
+쫀1
+쑤셔
+씌8
+지 랄
+개씁블
+씨박쉑히
+좃
+ㅆㅣ뎅
+뷰우웅신
+아오시바
+개세
+정병
+씨브럴
+웅엥
+개섹
+보지에자지껴
+자지짤라
+캐럿닷컴
+골 빈
+디-질
+더러운년
+꼬라지
+더엉신
+띠풀
+병1크
+mi틴
+씨퐁보지
+씹귀
+둄마
+뇨온
+버지구녕
+좆만한년
+시방새
+씨퐁넘
+호로새끼
+유두
+조오오조
+세꺄
+깨쌔끼
+씹뽀지
+백보지
+허벌레
+호루자슥
+공알
+씨뷰럴
+새퀴
+보지벌료
+아오ㅅㅂ
+내씨발
+극1혐
+애비
+씹자석
+시부렬
+시녀
+유우깝
+막대핧아줘
+꼴뵈기
+쓰렉
+개색휘
+후빨
+크리토리스
+이프리트
+십때끼
+좆털
+내잠지
+레기다
+개쓰래기
+게가튼
+시붕
+ㅅㅂ
+야동
+씨방새
+뒤져버
+에라이퉤
+졸라
+주둥아리
+미띤
\ No newline at end of file
diff --git a/API_SERVICE/batch_service/resources/mapping/biz_meta.json b/API_SERVICE/batch_service/resources/mapping/biz_meta.json
new file mode 100644
index 00000000..6ae84daa
--- /dev/null
+++ b/API_SERVICE/batch_service/resources/mapping/biz_meta.json
@@ -0,0 +1,330 @@
+{
+ "settings":{
+ "queries.cache.enabled":"true",
+ "refresh_interval":"10s",
+ "max_shingle_diff":10,
+ "analysis":{
+ "tokenizer":{
+ "nori_user_dic":{
+ "type":"nori_tokenizer",
+ "decompound_mode":"discard",
+ "user_dictionary":"user_dic.txt"
+ }
+ },
+ "filter":{
+ "nori_pos":{
+ "type":"nori_part_of_speech",
+ "stoptags":[
+ "E",
+ "J",
+ "SC",
+ "SE",
+ "SF",
+ "SP",
+ "SSC",
+ "SSO",
+ "SY",
+ "VCN",
+ "VCP",
+ "VSV",
+ "VX",
+ "XPN",
+ "XSA",
+ "XSN",
+ "XSV"
+ ]
+ },
+ "synonym":{
+ "type":"synonym_graph",
+ "synonyms_path":"synonyms.txt"
+ },
+ "stopwords":{
+ "type":"stop",
+ "stopwords_path":"stopwords.txt"
+ },
+ "shingle_ten":{
+ "type":"shingle",
+ "token_separator":"",
+ "max_shingle_size":10
+ }
+ },
+ "analyzer":{
+ "korean_analyzer":{
+ "tokenizer":"nori_user_dic",
+ "filter":[
+ "nori_pos",
+ "nori_readingform",
+ "lowercase",
+ "synonym",
+ "stopwords",
+ "remove_duplicates",
+ "shingle_ten"
+ ]
+ }
+ }
+ }
+ },
+ "mappings":{
+ "properties":{
+ "biz_dataset_id":{
+ "type":"keyword"
+ },
+ "data_nm":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ },
+ "fielddata":true
+ },
+ "data_desc":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ }
+ },
+ "ctgry_id":{
+ "type":"keyword"
+ },
+ "ctgry":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ }
+ },
+ "data_prv_desk":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ }
+ },
+ "license":{
+ "type":"text"
+ },
+ "data_shap":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ }
+ },
+ "data_srttn":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ }
+ },
+ "src_url":{
+ "type":"text"
+ },
+ "kywrd":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ }
+ },
+ "data_updt_cyc":{
+ "type":"keyword"
+ },
+ "adm_dep":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ }
+ },
+ "admr_nm":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ }
+ },
+ "file_info":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ }
+ },
+ "file_read_authority":{
+ "type":"keyword"
+ },
+ "status":{
+ "type":"keyword"
+ },
+ "reg_type":{
+ "type":"keyword"
+ },
+ "retv_num":{
+ "type":"long"
+ },
+ "lang":{
+ "type":"text"
+ },
+ "adm_dep_hp":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ }
+ },
+ "updt_nxt_dt":{
+ "type":"date",
+ "format":"strict_date_optional_time_nanos||yyyy-MM-dd||yyyy-MM-dd HH:mm:ss.S||yyyy-MM-dd HH:mm:ss||yyyy-MM-dd HH:mm:ss.SSS||date_hour_minute_second"
+ },
+ "updt_dt":{
+ "type":"date",
+ "format":"strict_date_optional_time_nanos||yyyy-MM-dd||yyyy-MM-dd HH:mm:ss.S||yyyy-MM-dd HH:mm:ss||yyyy-MM-dd HH:mm:ss.SSS||date_hour_minute_second"
+ },
+ "reg_dt":{
+ "type":"date",
+ "format":"strict_date_optional_time_nanos||yyyy-MM-dd||yyyy-MM-dd HH:mm:ss.S||yyyy-MM-dd HH:mm:ss||yyyy-MM-dd HH:mm:ss.SSS||date_hour_minute_second"
+ },
+ "process_dt":{
+ "type":"date",
+ "format":"strict_date_optional_time_nanos||yyyy-MM-dd||yyyy-MM-dd HH:mm:ss.S||yyyy-MM-dd HH:mm:ss||yyyy-MM-dd HH:mm:ss.SSS||date_hour_minute_second"
+ },
+ "reg_user":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ }
+ },
+ "amd_user":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ }
+ },
+ "reg_date":{
+ "type":"date",
+ "format":"strict_date_optional_time_nanos||yyyy-MM-dd||yyyy-MM-dd HH:mm:ss.S||yyyy-MM-dd HH:mm:ss||yyyy-MM-dd HH:mm:ss.SSS||date_hour_minute_second"
+ },
+ "amd_date":{
+ "type":"date",
+ "format":"strict_date_optional_time_nanos||yyyy-MM-dd||yyyy-MM-dd HH:mm:ss.S||yyyy-MM-dd HH:mm:ss||yyyy-MM-dd HH:mm:ss.SSS||date_hour_minute_second"
+
+ },
+ "data_limit":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ }
+ },
+ "othr_use_notes":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ }
+ },
+ "data_global_nm":{
+ "type":"text"
+ },
+ "downl_num":{
+ "type":"long"
+ },
+ "attnt_data_num":{
+ "type":"long"
+ },
+ "share_num":{
+ "type":"long"
+ },
+ "contents":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ }
+ },
+ "has_sample":{
+ "type":"text"
+ },
+ "has_html":{
+ "type":"text"
+ },
+ "analysis_cnt":{
+ "type":"integer"
+ },
+ "data_type":{
+ "type":"text"
+ },
+ "swagger_url":{
+ "type":"text"
+ },
+ "api_url":{
+ "type":"text"
+ },
+ "api_type":{
+ "type":"text"
+ },
+ "data_format":{
+ "type":"text"
+ },
+ "traffic_opt":{
+ "type":"text"
+ },
+ "has_json":{
+ "type":"text"
+ }
+
+ }
+ }
+}
\ No newline at end of file
diff --git a/API_SERVICE/batch_service/resources/mapping/v_biz_meta_oversea_els.json b/API_SERVICE/batch_service/resources/mapping/v_biz_meta_oversea_els.json
new file mode 100644
index 00000000..192f14f9
--- /dev/null
+++ b/API_SERVICE/batch_service/resources/mapping/v_biz_meta_oversea_els.json
@@ -0,0 +1,162 @@
+{
+ "settings":{
+ "queries.cache.enabled":"true",
+ "refresh_interval":"10s",
+ "max_shingle_diff":10,
+ "analysis":{
+ "tokenizer":{
+ "nori_user_dic":{
+ "type":"nori_tokenizer",
+ "decompound_mode":"discard",
+ "user_dictionary":"user_dic.txt"
+ }
+ },
+ "filter":{
+ "nori_pos":{
+ "type":"nori_part_of_speech",
+ "stoptags":[
+ "E",
+ "J",
+ "SC",
+ "SE",
+ "SF",
+ "SP",
+ "SSC",
+ "SSO",
+ "SY",
+ "VCN",
+ "VCP",
+ "VSV",
+ "VX",
+ "XPN",
+ "XSA",
+ "XSN",
+ "XSV"
+ ]
+ },
+ "synonym":{
+ "type":"synonym_graph",
+ "synonyms_path":"synonyms.txt"
+ },
+ "stopwords":{
+ "type":"stop",
+ "stopwords_path":"stopwords.txt"
+ },
+ "shingle_ten":{
+ "type":"shingle",
+ "token_separator":"",
+ "max_shingle_size":10
+ }
+ },
+ "analyzer":{
+ "korean_analyzer":{
+ "tokenizer":"nori_user_dic",
+ "filter":[
+ "nori_pos",
+ "nori_readingform",
+ "lowercase",
+ "synonym",
+ "stopwords",
+ "remove_duplicates",
+ "shingle_ten"
+ ]
+ }
+ }
+ }
+ },
+ "mappings":{
+ "properties":{
+ "biz_dataset_id":{
+ "type":"keyword"
+ },
+ "data_nm":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ },
+ "fielddata":true
+ },
+ "data_desc":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ }
+ },
+ "ctgry":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ }
+ },
+ "re_ctgry":{
+ "type":"text"
+ },
+ "re_data_prv_desk":{
+ "type":"text"
+ },
+ "data_prv_desk":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ }
+ },
+ "data_srttn":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ }
+ },
+ "updt_dt":{
+ "type":"date",
+ "format":"strict_date_optional_time_nanos||yyyy-MM-dd||yyyy-MM-dd HH:mm:ss.S||yyyy-MM-dd HH:mm:ss||yyyy-MM-dd HH:mm:ss.SSS||date_hour_minute_second"
+ },
+ "reg_dt":{
+ "type":"date",
+ "format":"strict_date_optional_time_nanos||yyyy-MM-dd||yyyy-MM-dd HH:mm:ss.S||yyyy-MM-dd HH:mm:ss||yyyy-MM-dd HH:mm:ss.SSS||date_hour_minute_second"
+ },
+ "update_time":{
+ "type":"date",
+ "format":"strict_date_optional_time_nanos||yyyy-MM-dd||yyyy-MM-dd HH:mm:ss.S||yyyy-MM-dd HH:mm:ss||yyyy-MM-dd HH:mm:ss.SSS||date_hour_minute_second"
+ },
+ "data_global_nm":{
+ "type":"text"
+ },
+ "nation":{
+ "type":"text"
+ },
+ "data_type":{
+ "type":"text"
+ },
+ "has_json":{
+ "type":"text"
+ },
+ "retv_num":{
+ "type":"integer"
+ },
+ "share_num":{
+ "type":"integer"
+ }
+
+ }
+ }
+}
\ No newline at end of file
diff --git a/API_SERVICE/batch_service/resources/template/analysisRequestEmail.html b/API_SERVICE/batch_service/resources/template/analysisRequestEmail.html
new file mode 100644
index 00000000..94d0e8fe
--- /dev/null
+++ b/API_SERVICE/batch_service/resources/template/analysisRequestEmail.html
@@ -0,0 +1,39 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ |
+ 자동차데이터포털 알림 메일
+ |
+
+
+
+
+ CONTENTS1
+
+ [ 요청 데이터 목록 ]
+ CONTENTS2
+
+ |
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/API_SERVICE/batch_service/resources/template/emailAthnSend.html b/API_SERVICE/batch_service/resources/template/emailAthnSend.html
new file mode 100644
index 00000000..3d305b4d
--- /dev/null
+++ b/API_SERVICE/batch_service/resources/template/emailAthnSend.html
@@ -0,0 +1,40 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ |
+ 자동차데이터포털 사용자 인증 알림
+ |
+
+
+
+
+ 자동차데이터포털에서 사용자 인증을 위해 보낸 인증번호입니다.
+ CONTENTS1
+
+ |
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/API_SERVICE/batch_service/resources/template/notyEmail.html b/API_SERVICE/batch_service/resources/template/notyEmail.html
new file mode 100644
index 00000000..ec42acb7
--- /dev/null
+++ b/API_SERVICE/batch_service/resources/template/notyEmail.html
@@ -0,0 +1,39 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ |
+ 자동차데이터포털 알림 메일
+ |
+
+
+ |
+
+ |
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/API_SERVICE/batch_service/resources/template/pwdEmailAthn.html b/API_SERVICE/batch_service/resources/template/pwdEmailAthn.html
new file mode 100644
index 00000000..6085e8d4
--- /dev/null
+++ b/API_SERVICE/batch_service/resources/template/pwdEmailAthn.html
@@ -0,0 +1,40 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ |
+ 자동차데이터포털 비밀번호 찾기 인증 알림
+ |
+
+
+
+
+ 자동차데이터포털에서 비밀번호 찾기를 위해 보낸 인증번호입니다.
+ CONTENTS1
+
+ |
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/API_SERVICE/batch_service/resources/template/shareEmail.html b/API_SERVICE/batch_service/resources/template/shareEmail.html
new file mode 100644
index 00000000..656efdc6
--- /dev/null
+++ b/API_SERVICE/batch_service/resources/template/shareEmail.html
@@ -0,0 +1,37 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ |
+ 자동차데이터포털 데이터 공유 메일
+ |
+
+
+
+
+ 자동차데이터포털에서 공유한 데이터입니다.
+ CONTENTS1
+
+ |
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/API_SERVICE/batch_service/resources/template/toolApplyEmail.html b/API_SERVICE/batch_service/resources/template/toolApplyEmail.html
new file mode 100644
index 00000000..068bf569
--- /dev/null
+++ b/API_SERVICE/batch_service/resources/template/toolApplyEmail.html
@@ -0,0 +1,37 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ |
+ 자동차데이터포털 TITLE 신청 메일
+ |
+
+
+
+
+ CONTENTS1
+ CONTENTS2
+
+ |
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/API_SERVICE/meta_service/ELKSearch/__init__.py b/API_SERVICE/common_service/app/__init__.py
similarity index 100%
rename from API_SERVICE/meta_service/ELKSearch/__init__.py
rename to API_SERVICE/common_service/app/__init__.py
diff --git a/API_SERVICE/meta_service/common/__init__.py b/API_SERVICE/common_service/app/common/__init__.py
similarity index 100%
rename from API_SERVICE/meta_service/common/__init__.py
rename to API_SERVICE/common_service/app/common/__init__.py
diff --git a/API_SERVICE/common_service/app/common/config.py b/API_SERVICE/common_service/app/common/config.py
new file mode 100644
index 00000000..0ce3a2d3
--- /dev/null
+++ b/API_SERVICE/common_service/app/common/config.py
@@ -0,0 +1,128 @@
+import logging.config
+import os
+from functools import lru_cache
+from typing import Optional
+
+from pydantic import BaseSettings, PostgresDsn
+
+base_dir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+print(f"common base_dir :: {base_dir}")
+
+
+class DBInfo(BaseSettings):
+ DB_POOL_RECYCLE: int = 900
+ DB_ECHO: bool = True
+ DB_URL: str
+
+
+class PGInfo(DBInfo):
+ SCHEMA: str
+
+ class Config:
+ env_file = f"{base_dir}/.env"
+ env_file_encoding = "utf-8"
+
+
+class KeycloakInfo(BaseSettings):
+ keycloak_url: Optional[str]
+ admin_username: Optional[str]
+ admin_password: Optional[str]
+ realm: Optional[str]
+ client_id: Optional[str]
+ client_secret: Optional[str]
+
+ class Config:
+ env_file = f"{base_dir}/.env"
+ env_file_encoding = "utf-8"
+
+
+class Settings(BaseSettings):
+ BASE_DIR = base_dir
+ RELOAD: bool
+ TESTING: bool
+
+ DB_INFO: DBInfo
+ KEYCLOAK_INFO: KeycloakInfo
+
+
+class ProdSettings(Settings):
+ RELOAD = False
+ TESTING = False
+
+ DB_INFO = PGInfo()
+ KEYCLOAK_INFO = KeycloakInfo()
+
+
+class LocalSettings(Settings):
+ TESTING: bool = False
+ RELOAD: bool = False
+
+ DB_INFO = PGInfo(
+ DB_POOL_RECYCLE=900,
+ DB_ECHO=True,
+ SCHEMA="sitemng,users,meta,iag,ckan,board,analysis",
+ DB_URL=str(
+ PostgresDsn.build(
+ scheme="postgresql",
+ host="192.168.100.126",
+ port="25432",
+ user="dpmanager",
+ password="hello.dp12#$",
+ path="/dataportal",
+ )
+ ),
+ )
+
+ KEYCLOAK_INFO = KeycloakInfo(
+ keycloak_url="https://auth.bigdata-car.kr",
+ admin_username="admin",
+ admin_password="2021@katech",
+ realm="mobigen",
+ client_id="katech",
+ client_secret="ZWY7WDimS4rxzaXEfwEShYMMly00i8L0",
+ )
+
+
+class TestSettings(LocalSettings):
+ TESTING = True
+ RELOAD = True
+
+
+@lru_cache
+def get_settings() -> Settings:
+ env = os.getenv("APP_ENV", "prod")
+ print(f"env :: {env}")
+ return {"local": LocalSettings(), "test": TestSettings(), "prod": ProdSettings()}[env]
+
+
+settings = get_settings()
+print(settings)
+
+log_config = {
+ "version": 1,
+ "disable_existing_loggers": False,
+ "formatters": {
+ "standard": {"format": "%(asctime)s %(levelname)s [%(filename)s:%(lineno)d] - %(message)s"},
+ },
+ "handlers": {
+ "console_handler": {
+ "class": "logging.StreamHandler",
+ "level": "DEBUG",
+ "formatter": "standard",
+ },
+ },
+ "root": {"level": "DEBUG", "handlers": ["console_handler"], "propagate": False},
+}
+
+if "prod" == os.getenv("APP_ENV", "prod"):
+ log_config["handlers"]["file_handler"] = {
+ "class": "logging.handlers.RotatingFileHandler",
+ "filename": os.path.join(base_dir, "log", "common.log"),
+ "mode": "a",
+ "maxBytes": 20000000,
+ "backupCount": 10,
+ "level": "INFO",
+ "formatter": "standard",
+ }
+ log_config["root"]["handlers"].append("file_handler")
+logging.config.dictConfig(log_config)
diff --git a/API_SERVICE/common_service/app/common/const.py b/API_SERVICE/common_service/app/common/const.py
new file mode 100644
index 00000000..7a165641
--- /dev/null
+++ b/API_SERVICE/common_service/app/common/const.py
@@ -0,0 +1,27 @@
+from libs.database.dml_controller import Base
+
+
+NOT_ALLOWED_TABLES = [""]
+time_zone = "Asia/Seoul"
+auth_no_len = 10
+subject_dict = {
+ "register": "[자동차데이터포털]회원가입을 위한 인증 메일입니다.",
+ "password": "[자동차데이터포털]비밀번호 변경을 위한 인증 메일입니다.",
+ "share": "[자동차데이터포털] 자동차데이터포털에서 공유한 데이터입니다."
+}
+
+
+class EmailAuthTable(Base):
+ table_nm = "tb_email_athn_info"
+ key_column = "email"
+
+
+# login_service/LoginTable과 같음
+class UserInfoTable(Base):
+ table_nm = "tb_user_info"
+ key_column = "user_id"
+
+
+class EmailSendInfoTable(Base):
+ table_nm = "tb_email_send_info"
+ key_column = "email_id"
diff --git a/API_SERVICE/meta_service/database/__init__.py b/API_SERVICE/common_service/app/database/__init__.py
similarity index 100%
rename from API_SERVICE/meta_service/database/__init__.py
rename to API_SERVICE/common_service/app/database/__init__.py
diff --git a/API_SERVICE/common_service/app/database/conn.py b/API_SERVICE/common_service/app/database/conn.py
new file mode 100644
index 00000000..b39f27a1
--- /dev/null
+++ b/API_SERVICE/common_service/app/database/conn.py
@@ -0,0 +1,6 @@
+from sqlalchemy.ext.automap import automap_base
+
+from libs.database.orm import SQLAlchemyConnector
+
+Base = automap_base()
+db = SQLAlchemyConnector(Base)
diff --git a/API_SERVICE/common_service/app/main.py b/API_SERVICE/common_service/app/main.py
new file mode 100644
index 00000000..0f170fff
--- /dev/null
+++ b/API_SERVICE/common_service/app/main.py
@@ -0,0 +1,45 @@
+import logging
+
+import uvicorn
+from fastapi import FastAPI
+from starlette.middleware.base import BaseHTTPMiddleware
+
+from common_service.app.common.config import settings
+from common_service.app.database.conn import db
+from common_service.app.routes.v1 import code_info, select, execute, auth_email
+from libs.auth.keycloak import keycloak
+from libs.middlewares.keycloak_middleware import refresh_token_from_cookie_wrapper
+
+logger = logging.getLogger()
+
+
+def create_app():
+ app_ = FastAPI()
+ logger.info(settings.dict())
+ db.init_app(app_, **settings.dict())
+
+ keycloak.set_url(settings.KEYCLOAK_INFO.keycloak_url)
+ app_.add_middleware(
+ BaseHTTPMiddleware,
+ dispatch=refresh_token_from_cookie_wrapper(
+ keycloak=keycloak,
+ realm=settings.KEYCLOAK_INFO.realm,
+ client_id=settings.KEYCLOAK_INFO.client_id,
+ client_secret=settings.KEYCLOAK_INFO.client_secret,
+ logger=logger,
+ ),
+ )
+
+ app_.include_router(select.router, prefix="/portal/api/common")
+ app_.include_router(execute.router, prefix="/portal/api/common")
+ app_.include_router(auth_email.router, prefix="/portal/api/common")
+ app_.include_router(code_info.router, prefix="/portal/api/sitemng")
+
+ return app_
+
+
+app = create_app()
+
+
+if __name__ == "__main__":
+ uvicorn.run("main:app", host="0.0.0.0", port=8090, reload=True)
diff --git a/API_SERVICE/meta_service/routes/__init__.py b/API_SERVICE/common_service/app/routes/__init__.py
similarity index 100%
rename from API_SERVICE/meta_service/routes/__init__.py
rename to API_SERVICE/common_service/app/routes/__init__.py
diff --git a/API_SERVICE/meta_service/routes/v1/__init__.py b/API_SERVICE/common_service/app/routes/v1/__init__.py
similarity index 100%
rename from API_SERVICE/meta_service/routes/v1/__init__.py
rename to API_SERVICE/common_service/app/routes/v1/__init__.py
diff --git a/API_SERVICE/common_service/app/routes/v1/auth_email.py b/API_SERVICE/common_service/app/routes/v1/auth_email.py
new file mode 100644
index 00000000..c852a384
--- /dev/null
+++ b/API_SERVICE/common_service/app/routes/v1/auth_email.py
@@ -0,0 +1,174 @@
+import logging
+import string
+import random
+import uuid
+
+from datetime import datetime
+from fastapi import APIRouter, Depends
+from pydantic import BaseModel
+from passlib.context import CryptContext
+
+from libs.database.connector import Executor
+
+from common_service.app.database.conn import db
+from common_service.app.common.const import (
+ EmailSendInfoTable,
+ EmailAuthTable,
+ UserInfoTable,
+ auth_no_len,
+ subject_dict
+)
+
+
+logger = logging.getLogger()
+router = APIRouter()
+
+
+# emailAthnPass
+class EmailAuthFail(Exception):
+ pass
+
+
+class EmailAthnPass(BaseModel):
+ email: str
+ athn_no: str
+ new_password: str
+
+
+# emailAthnSend
+class EmailNotAuth(Exception):
+ pass
+
+
+class EmailNotExist(Exception):
+ pass
+
+
+class EmailAthnSend(BaseModel):
+ email: str
+ msg_type: str # register or password
+
+
+# emailAthnCnfm
+# NOTE: a duplicate `class EmailAuthFail(Exception)` was declared here; it was
+# identical to the definition above and never raised, so the redundant re-declaration is dropped.
+
+
+class EmailAthnCnfm(BaseModel):
+ email: str
+ athn_no: str
+
+
+# emailDataShare
+class EmailInfo(BaseModel):
+ email: str
+ msg_type: str # share
+ message: str
+
+
+# emailAthnSend
+def make_auth_no():
+ string_pool = string.ascii_letters + string.digits
+ auth_no = ""
+ for _ in range(int(auth_no_len)):
+ auth_no += random.choice(string_pool)
+ return auth_no
+
+
+def email_history(session, contents, param):
+ history = {
+        "email_id": str(uuid.uuid4()),
+ "rcv_adr": param.email,
+ "title": subject_dict[param.msg_type],
+ "contents": contents,
+ "tmplt_cd": param.msg_type,
+ "sttus": "REQ",
+ "reg_date": datetime.now()
+ }
+ session.execute(**EmailSendInfoTable.get_execute_query("INSERT", history))
+
+
+@router.post("/emailAthnPass")
+def auth_pass(email_pass: EmailAthnPass, session: Executor = Depends(db.get_db)):
+ try:
+ email_info = session.query(**EmailAuthTable.get_select_query(email_pass.email)).first()
+
+ if email_info["athn_no"] == email_pass.athn_no and email_info["athn_yn"] == "Y":
+ new_password = CryptContext(schemes=["bcrypt"], deprecated="auto").hash(email_pass.new_password)
+ user_info = session.query(**UserInfoTable.get_select_query(email_pass.email)).first()
+ user_info["user_password"] = new_password
+ session.execute(**UserInfoTable.get_execute_query("UPDATE", user_info))
+ result = {"result": 1, "msg": "Successfully Auth Confirm."}
+ else:
+ result = {"result": 0, "msg": "EmailAuthFail"}
+ except Exception as e:
+ result = {"result": 0, "errorMessage": str(e)}
+ logger.error(e, exc_info=True)
+ return result
+
+
+@router.post("/emailAthnSend")
+def auth_send(auth_send: EmailAthnSend, session: Executor = Depends(db.get_db)):
+ # todo: insert 구문 축약
+ try:
+ auth_no = make_auth_no()
+ exist_mail = session.query(**EmailAuthTable.get_select_query(auth_send.email)).first()
+
+ if auth_send.msg_type == "password":
+ if exist_mail is None:
+ raise EmailNotExist
+ if exist_mail["athn_yn"] == "N":
+ raise EmailNotAuth
+
+ if exist_mail is None:
+ # insert
+ method = "INSERT"
+ exist_mail = {
+ "email": auth_send.email,
+ "athn_no": auth_no,
+ "athn_yn": "N",
+ "send_date": "NOW()"
+ }
+ else:
+ # update
+ method = "UPDATE"
+ exist_mail["athn_no"] = auth_no
+ exist_mail["send_date"] = "NOW()"
+
+ session.execute(**EmailAuthTable.get_execute_query(method, exist_mail))
+
+ # mail history insert
+ email_history(session, auth_no, auth_send)
+
+ result = {"result": 1, "msg": "Successfully Auth Password."}
+ except Exception as e:
+ result = {"result": 0, "errorMessage": str(e)}
+ logger.error(e, exc_info=True)
+ return result
+
+
+@router.post("/emailAthnCnfm")
+def auth_confirm(auth_conf: EmailAthnCnfm, session: Executor = Depends(db.get_db)):
+ try:
+ email_info = session.query(**EmailAuthTable.get_select_query(auth_conf.email)).first()
+ if email_info["athn_no"] == auth_conf.athn_no:
+ email_info["athn_yn"] = "Y"
+ email_info["athn_date"] = "NOW()"
+            session.execute(**EmailAuthTable.get_execute_query("UPDATE", email_info))
+ result = {"result": 1, "msg": "Successfully Auth Confirm."}
+ else:
+ result = {"result": 0, "msg": "EmailAuthFail"}
+ except Exception as e:
+ result = {"result": 0, "errorMessage": str(e)}
+ logger.error(e, exc_info=True)
+ return result
+
+
+@router.post("/emailDataShare")
+def data_share(email_info: EmailInfo, session: Executor = Depends(db.get_db)):
+ try:
+ email_history(session, email_info.message, email_info)
+ result = {"result": 1, "msg": "200"}
+ except Exception as e:
+        result = {"result": 0, "errorMessage": str(e)}
+ return result
diff --git a/API_SERVICE/common_service/app/routes/v1/code_info.py b/API_SERVICE/common_service/app/routes/v1/code_info.py
new file mode 100644
index 00000000..81c70cb9
--- /dev/null
+++ b/API_SERVICE/common_service/app/routes/v1/code_info.py
@@ -0,0 +1,60 @@
+import logging
+
+from fastapi import APIRouter, Depends
+from starlette.responses import JSONResponse
+
+from common_service.app.database.conn import db
+from libs.database.connector import Executor
+
+logger = logging.getLogger()
+router = APIRouter()
+
+
+@router.get("/getCodeInfo")
+async def get_code_detail(groupId, session: Executor = Depends(db.get_db)):
+ table_nm = "tb_code_detail"
+ rows, _ = session.query(
+ table_nm=table_nm,
+ where_info=[{"table_nm": table_nm, "key": "code_group_id", "value": groupId, "compare_op": "="}],
+ ).all()
+ code_infos = [
+ {"code_id": row["code_id"], "code_nm": row["code_nm"], "data_1": row["data_1"], "data_2": row["data_2"]}
+ if rows
+ else []
+ for row in rows
+ ]
+
+ return JSONResponse(status_code=200, content={"result": 1, "data": {"list": code_infos}, "errorMessage": ""})
+
+
+@router.get("/getCodeList")
+async def get_code_list(
+ perPage: int, curPage: int, gropId: str, keyword: str = "", session: Executor = Depends(db.get_db)
+) -> JSONResponse:
+ table_nm = "tb_code_detail"
+ query_data = {
+ "table_nm": table_nm,
+ "where_info": [{"table_nm": table_nm, "key": "code_group_id", "value": gropId, "compare_op": "="}],
+ }
+
+ if keyword:
+ # select *, row_number() ... order condition code_nm SIMILAR to %{keyword}% DESC
+ query_data["where_info"].append(
+ {"table_nm": table_nm, "key": "code_nm", "value": keyword, "compare_op": "like", "op": "AND"}
+ )
+ else:
+ # select *, row_number() ... order condition reg_date ASC
+ ...
+
+ query_data["page_info"] = {"per_page": perPage, "cur_page": curPage}
+ rows, tcnt = session.query(**query_data).all()
+
+ logger.debug(f"rows :: {rows}, total cnt :: {tcnt}")
+ code_info = []
+ if rows:
+ code_info = [{"code_id": row["code_id"], "code_nm": row["code_nm"]} for row in rows]
+ logger.debug(code_info)
+
+ return JSONResponse(
+ status_code=200, content={"result": 1, "errorMessage": "", "data": {"totalcount": str(tcnt), "list": code_info}}
+ )
diff --git a/API_SERVICE/common_service/routes/v1/execute.py b/API_SERVICE/common_service/app/routes/v1/execute.py
similarity index 52%
rename from API_SERVICE/common_service/routes/v1/execute.py
rename to API_SERVICE/common_service/app/routes/v1/execute.py
index 9e5ac91a..e9438ea3 100644
--- a/API_SERVICE/common_service/routes/v1/execute.py
+++ b/API_SERVICE/common_service/app/routes/v1/execute.py
@@ -4,13 +4,10 @@
from fastapi import APIRouter, Depends, Request
from pydantic import BaseModel
from starlette.responses import JSONResponse
-from common_service.common.const import ALGORITHM, NOT_ALLOWED_TABLES, SECRET_KEY
-from common_service.database.conn import db
-import jwt
+from common_service.app.database.conn import db
from libs.database.connector import Executor
-
logger = logging.getLogger()
@@ -24,16 +21,12 @@ class CommonExecute(BaseModel):
router = APIRouter()
-@router.post("/common-execute")
+@router.post("/commonExecute")
async def common_execute(request: Request, params: List[CommonExecute], session: Executor = Depends(db.get_db)):
try:
for param in params:
- if param.table_nm in NOT_ALLOWED_TABLES:
- roleidx = get_roleidx_from_token(request)
- if roleidx != "0":
- return JSONResponse(content={"result": 0, "errorMessage": "NotAllowedTable"})
- elif param.table_nm == "USR_MGMT" and param.method == "INSERT":
- return JSONResponse(content={"result": 0, "errorMessage": "use register api"})
+ # TODO: 테이블 접근 제한에 대한 권한 확인등의 작업 필요
+ logger.info(f"execute :: {param}")
session.execute(**param.dict())
return JSONResponse(content={"result": 1, "errorMessage": ""}, status_code=200)
except Exception as e:
@@ -41,10 +34,3 @@ async def common_execute(request: Request, params: List[CommonExecute], session:
for param in params:
logger.info(param.dict())
return JSONResponse(content={"result": 0, "errorMessage": str(e)}, status_code=400)
-
-
-def get_roleidx_from_token(request: Request) -> dict:
- token = request.headers.get("Authorization")
- if token.startswith("Bearer "):
- token = token[7:]
- return dict(jwt.decode(token, SECRET_KEY, algorithms=ALGORITHM)).get("roleidx")
diff --git a/API_SERVICE/common_service/routes/v1/select.py b/API_SERVICE/common_service/app/routes/v1/select.py
similarity index 63%
rename from API_SERVICE/common_service/routes/v1/select.py
rename to API_SERVICE/common_service/app/routes/v1/select.py
index b73ede26..6a0b2da1 100644
--- a/API_SERVICE/common_service/routes/v1/select.py
+++ b/API_SERVICE/common_service/app/routes/v1/select.py
@@ -4,9 +4,8 @@
from fastapi import Depends, APIRouter
from pydantic import BaseModel
from starlette.responses import JSONResponse
-from common_service.common.config import settings
-from common_service.database.conn import db
+from common_service.app.database.conn import db
from libs.database.connector import Executor
@@ -49,44 +48,36 @@ class CommonSelect(BaseModel):
logger = logging.getLogger()
-@router.post("/common-select")
+@router.post("/commonSelect")
async def common_select(params: CommonSelect, session: Executor = Depends(db.get_db)):
- """
- {
- "table_nm":"banr_adm_bas",
- "where_info":[
- {
- "key":"banr_div",
- "value":"T",
- "table_nm":"banr_adm_bas",
- "compare_op":"Equal","op":""
- },
- {
- "key":"pstng_fns_date",
- "compare_op":">=",
- "value":"2023-04-12 00:00:00",
- "table_nm":"banr_adm_bas",
- "op":"AND"
- }
- ]
- }
- {"table_nm":"vw_srhwd_find_tmscnt_sum","order_info":{"key":"find_tmscnt","value":"DESC","table_nm":"vw_srhwd_find_tmscnt_sum","order":"DESC"},"page_info":{"per_page":10,"cur_page":1}}
- """
try:
+ logger.info(f"params :: {params}")
rows = session.query(**params.dict()).all()
+ header = get_column_desc(params.table_nm, session)
return JSONResponse(
content={
"data": {
"count": rows[1] if rows else 0,
"body": rows[0] if rows else [],
- "header": session.get_column_info(params.table_nm, settings.DB_INFO.SCHEMA),
+ # "header": session.get_column_info(params.table_nm, settings.DB_INFO.SCHEMA),
+ "header": [
+ {"column_name": info["eng_nm"], "kor_column_name": info["kor_nm"]} for info in header[0]
+ ],
},
"result": 1,
"errorMessage": "",
},
status_code=200,
)
-
except Exception as e:
logger.error(f"{params.dict()}, {str(e)}", exc_info=True)
return JSONResponse(content={"result": 0, "errorMessage": str(e)}, status_code=400)
+
+
+def get_column_desc(table_nm, session: Executor):
+ return session.query(
+ table_nm="tb_table_list",
+ key="table_id",
+ join_info={"table_nm": "tb_table_column_info", "key": "table_id"},
+ where_info=[{"table_nm": "tb_table_list", "key": "table_nm", "value": table_nm, "compare_op": "="}],
+ ).all()
diff --git a/API_SERVICE/common_service/common/config.py b/API_SERVICE/common_service/common/config.py
deleted file mode 100644
index d6a8e1db..00000000
--- a/API_SERVICE/common_service/common/config.py
+++ /dev/null
@@ -1,107 +0,0 @@
-import json
-import logging.config
-import os
-from functools import lru_cache
-from typing import Union
-
-from pydantic import BaseSettings, PostgresDsn, validator, SecretStr
-
-base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
-print(f"project base_dir :: {base_dir}")
-
-
-class DBInfo(BaseSettings):
- HOST: str = ""
- PORT: str = ""
- USER: str = ""
- PASS: SecretStr = ""
- BASE: str = ""
- SCHEMA: str = ""
-
- def get_dsn(self):
- return ""
-
-
-class PGInfo(DBInfo):
- type: str = "postgres"
- SCHEMA: str = ""
-
- def get_dsn(self):
- return str(
- PostgresDsn.build(
- scheme="postgresql",
- host=self.HOST,
- port=self.PORT,
- user=self.USER,
- password=self.PASS.get_secret_value(),
- path=f"/{self.BASE}",
- )
- )
-
-
-class TiberoInfo(DBInfo):
- type: str = "tibero"
-
- def get_dsn(self):
- return f"DSN={self.BASE};UID={self.USER};PWD={self.PASS.get_secret_value()}"
-
-
-class Settings(BaseSettings):
- BASE_DIR = base_dir
- DB_POOL_RECYCLE: int = 900
- DB_ECHO: bool = False
- RELOAD: bool = True
- TESTING: bool = True
-
- DB_INFO: DBInfo = DBInfo()
- DB_URL: Union[str, PostgresDsn] = None
-
- @validator("DB_URL", pre=True, always=True)
- def assemble_db_url(cls, v, values):
- if all(value is not None for value in values.values()):
- return values.get("DB_INFO").get_dsn()
- raise ValueError("Not all PostgreSQL database connection values were provided.")
-
-
-class ProdSettings(Settings):
- RELOAD = False
- TESTING = False
-
- class Config:
- env_file = f"{base_dir}/.env"
- env_file_encoding = "utf-8"
-
-
-class LocalSettings(Settings):
- TESTING: bool = False
- DB_POOL_RECYCLE: int = 900
- DB_ECHO: bool = True
- RELOAD: bool = False
-
- # DB_INFO = PGInfo(
- # HOST="192.168.100.126", PORT="25432", USER="dpsi", PASS="hello.sitemng12#$", BASE="ktportal", SCHEMA="sitemng"
- # )
-
- DB_INFO: TiberoInfo = TiberoInfo(
- HOST="192.168.101.164", PORT="8629", USER="dhub", PASS="dhub1234", BASE="tibero", SCHEMA="DHUB"
- )
-
-
-class TestSettings(LocalSettings):
- ...
-
-
-@lru_cache
-def get_settings() -> Settings:
- env = os.getenv("APP_ENV", "prod")
- print(env)
- return {"local": LocalSettings(), "test": TestSettings(), "prod": ProdSettings()}[env]
-
-
-settings = get_settings()
-print(settings)
-
-with open(os.path.join(base_dir, "logging.json")) as f:
- log_config = json.load(f)
- logging.config.dictConfig(log_config)
-logger = logging.getLogger()
diff --git a/API_SERVICE/common_service/common/const.py b/API_SERVICE/common_service/common/const.py
deleted file mode 100644
index b96be432..00000000
--- a/API_SERVICE/common_service/common/const.py
+++ /dev/null
@@ -1,5 +0,0 @@
-NOT_ALLOWED_TABLES = ["USR_MGMT"]
-SECRET_KEY = "09d25e094faa6ca2556c818166b7a9563b93f7099f6f0f4caa6cf63b88e8d3e7"
-ALGORITHM = "HS256"
-EXPIRE_DELTA = 1
-COOKIE_NAME = "user-docean-access-token"
diff --git a/API_SERVICE/common_service/database/conn.py b/API_SERVICE/common_service/database/conn.py
deleted file mode 100644
index 44013bcc..00000000
--- a/API_SERVICE/common_service/database/conn.py
+++ /dev/null
@@ -1,29 +0,0 @@
-from fastapi import FastAPI
-from sqlalchemy import MetaData
-from sqlalchemy.ext.automap import automap_base
-from sqlalchemy.orm import declarative_base
-
-from common_service.common.config import settings
-from libs.database.tibero import TiberoConnector
-from libs.database.orm import SQLAlchemyConnector
-
-
-# TODO: SQLAlchemy version 수정필요
-class SQLAlchemyForCommon(SQLAlchemyConnector):
- def __init__(self, app: FastAPI = None, **kwargs):
- self._table_dict = None
- if app is not None:
- self.init_app(app=app, **kwargs)
-
- metadata = MetaData()
- for schema in kwargs.get("PG_SCHEMA").split(","):
- metadata.reflect(bind=self.engine, views=True, schema=schema)
-
- self._Base = automap_base(metadata=metadata)
- self._Base.prepare()
-
- # self._table_dict = dict(metadata.tables)
-
-
-Base = declarative_base()
-db = SQLAlchemyForCommon(Base) if settings.DB_INFO.type != "tibero" else TiberoConnector()
diff --git a/API_SERVICE/common_service/gunicorn.conf.py b/API_SERVICE/common_service/gunicorn.conf.py
index 8d3a8b45..122c1a57 100644
--- a/API_SERVICE/common_service/gunicorn.conf.py
+++ b/API_SERVICE/common_service/gunicorn.conf.py
@@ -16,6 +16,7 @@
# Must be a positive integer. Generally set in the 64-2048
# range.
#
+import os
bind = "0.0.0.0:8000"
backlog = 2048
@@ -144,10 +145,33 @@
#
# A string of "debug", "info", "warning", "error", "critical"
#
-logfile = "./log/common.log"
-errorlog = "./log/common-error.log"
-loglevel = "info"
-accesslog = "./log/common.log"
+
+
+def get_log_path():
+ import os
+
+ path_ = os.path.join(os.path.dirname(os.path.abspath(__file__)), "log")
+ if not os.path.exists(path_):
+ os.makedirs(path_)
+ print(f"make dir {path_}")
+
+ return path_
+
+
+app_env = os.getenv("APP_ENV", "prod")
+if app_env == "prod":
+ loglevel = "info"
+ log_name = "gunicorn-common"
+ log_dir_path = get_log_path()
+ logfile = os.path.join(log_dir_path, log_name + ".log")
+ errorlog = os.path.join(log_dir_path, log_name + "-error.log")
+ accesslog = logfile
+else:
+ loglevel = "debug"
+ logfile = "-"
+ errorlog = "-"
+ accesslog = "-"
+
access_log_format = '%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s"'
#
@@ -163,7 +187,7 @@
# A string or None to choose a default of something like 'gunicorn'.
#
-proc_name = "API-Service-common"
+proc_name = "API-Common-Service"
#
@@ -219,4 +243,4 @@ def worker_int(worker):
def worker_abort(worker):
- worker.log.info("worker received SIGABRT signal")
\ No newline at end of file
+ worker.log.info("worker received SIGABRT signal")
diff --git a/API_SERVICE/common_service/gunicorn.sh b/API_SERVICE/common_service/gunicorn.sh
index 0149792b..145d6b3d 100755
--- a/API_SERVICE/common_service/gunicorn.sh
+++ b/API_SERVICE/common_service/gunicorn.sh
@@ -1,13 +1,13 @@
#!/bin/bash
root_path="$( cd "$( dirname "$0" )" && pwd -P )"
-pid_path="$root_path/gunicorn-common.pid"
+pid_path="$root_path/gunicorn-router.pid"
echo $pid_path
# gunicorn 실행 명령어
start_gunicorn() {
- gunicorn main:app --bind 0.0.0.0:20000 -c gunicorn.conf.py -D --pid $pid_path
+ gunicorn app.main:app --bind 0.0.0.0:30001 -c gunicorn.conf.py -D --pid $pid_path
sleep 2
pid=$(cat $pid_path)
echo "Gunicorn started. PID: $pid"
diff --git a/API_SERVICE/common_service/logging.json b/API_SERVICE/common_service/logging.json
deleted file mode 100644
index 7c5ff75e..00000000
--- a/API_SERVICE/common_service/logging.json
+++ /dev/null
@@ -1,42 +0,0 @@
-{
- "version": 1,
- "disable_existing_loggers": false,
- "formatters": {
- "default": {
- "format": "%(asctime)s %(levelname)s [%(filename)s:%(lineno)d] - %(message)s"
- }
- },
- "handlers": {
- "console": {
- "class": "logging.StreamHandler",
- "level": "DEBUG",
- "formatter": "default"
- },
- "file": {
- "class": "logging.handlers.RotatingFileHandler",
- "level": "DEBUG",
- "formatter": "default",
- "filename": "./log/common.log",
- "mode": "a",
- "maxBytes": 20000000,
- "backupCount": 10
- }
- },
- "loggers": {
- "root": {
- "level": "DEBUG",
- "handlers": ["console", "file"],
- "propagate": false
- },
- "uvicorn.access": {
- "level": "INFO",
- "handlers": ["console", "file"],
- "propagate": false
- },
- "sqlalchemy.engine": {
- "level": "INFO",
- "handlers": ["console", "file"],
- "propagate": false
- }
- }
-}
\ No newline at end of file
diff --git a/API_SERVICE/common_service/main.py b/API_SERVICE/common_service/main.py
deleted file mode 100644
index 62a00851..00000000
--- a/API_SERVICE/common_service/main.py
+++ /dev/null
@@ -1,25 +0,0 @@
-import uvicorn
-from fastapi import FastAPI
-
-from common_service.common.config import logger
-from common_service.common.config import settings
-from common_service.database.conn import db
-from common_service.routes.v1 import select, execute
-
-
-def create_app():
- app_ = FastAPI()
- logger.info(settings.dict())
- db.init_app(app_, **settings.dict())
-
- app_.include_router(select.router, prefix="/portal/api/common")
- app_.include_router(execute.router, prefix="/portal/api/common")
-
- return app_
-
-
-app = create_app()
-
-
-if __name__ == "__main__":
- uvicorn.run("main:app", host="0.0.0.0", port=8090, reload=True)
diff --git a/API_SERVICE/common_service/requirements.txt b/API_SERVICE/common_service/requirements.txt
index 2077cf10..b76eb6f1 100644
--- a/API_SERVICE/common_service/requirements.txt
+++ b/API_SERVICE/common_service/requirements.txt
@@ -1,8 +1,12 @@
aiohttp==3.8.4
aiosignal==1.3.1
anyio==3.6.2
+APScheduler==3.10.1
async-timeout==4.0.2
attrs==23.1.0
+bcrypt==4.0.1
+boto3==1.28.67
+botocore==1.31.67
certifi==2022.12.7
cffi==1.15.1
charset-normalizer==3.1.0
@@ -10,23 +14,43 @@ click==8.1.3
cryptography==40.0.2
elastic-transport==8.4.0
elasticsearch==8.7.0
+exceptiongroup==1.1.2
fastapi==0.95.1
frozenlist==1.3.3
+greenlet==2.0.2
gunicorn==20.1.0
h11==0.14.0
+httpcore==0.17.3
+httpx==0.24.1
idna==3.4
+iniconfig==2.0.0
+jmespath==1.0.1
multidict==6.0.4
-psycopg2==2.9.6
-psycopg2-binary==2.9.6
+numpy==1.26.0
+packaging==23.1
+pandas==2.1.0
+passlib==1.7.4
+Pillow==10.0.1
+pluggy==1.2.0
+psycopg2-binary==2.9.9
pycparser==2.21
+pycryptodome==3.18.0
pydantic==1.10.7
PyJWT==2.7.0
-pyodbc==4.0.39
+pytest==7.4.0
+python-dateutil==2.8.2
python-dotenv==1.0.0
+pytz==2023.3
+requests==2.31.0
+s3transfer==0.7.0
+six==1.16.0
sniffio==1.3.0
-SQLAlchemy==2.0.9
+SQLAlchemy==2.0.22
starlette==0.26.1
+tomli==2.0.1
typing_extensions==4.5.0
+tzdata==2023.3
+tzlocal==5.0.1
urllib3==1.26.15
uvicorn==0.21.1
yarl==1.8.2
diff --git a/API_SERVICE/common_service/Dockerfile b/API_SERVICE/login_service/app/__init__.py
similarity index 100%
rename from API_SERVICE/common_service/Dockerfile
rename to API_SERVICE/login_service/app/__init__.py
diff --git a/API_SERVICE/docker-compose.yml b/API_SERVICE/login_service/app/common/__init__.py
similarity index 100%
rename from API_SERVICE/docker-compose.yml
rename to API_SERVICE/login_service/app/common/__init__.py
diff --git a/API_SERVICE/login_service/app/common/config.py b/API_SERVICE/login_service/app/common/config.py
new file mode 100644
index 00000000..98afbb1d
--- /dev/null
+++ b/API_SERVICE/login_service/app/common/config.py
@@ -0,0 +1,132 @@
+import logging.config
+import os
+from functools import lru_cache
+from typing import Optional
+
+from pydantic import BaseSettings, PostgresDsn
+
+base_dir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+print(f"login base_dir :: {base_dir}")
+
+
+class DBInfo(BaseSettings):
+ DB_POOL_RECYCLE: int = 900
+ DB_ECHO: bool = True
+ DB_URL: str
+
+
+class PGInfo(DBInfo):
+ SCHEMA: str
+
+ class Config:
+ env_file = f"{base_dir}/.env"
+ env_file_encoding = "utf-8"
+
+
+class KeycloakInfo(BaseSettings):
+ keycloak_url: Optional[str]
+ admin_username: Optional[str]
+ admin_password: Optional[str]
+ realm: Optional[str]
+ client_id: Optional[str]
+ client_secret: Optional[str]
+
+ class Config:
+ env_file = f"{base_dir}/.env"
+ env_file_encoding = "utf-8"
+
+
+class Settings(BaseSettings):
+ BASE_DIR = base_dir
+ RELOAD: bool
+ TESTING: bool
+
+ DB_INFO: DBInfo
+ KEYCLOAK_INFO: KeycloakInfo
+
+
+class ProdSettings(Settings):
+ RELOAD = False
+ TESTING = False
+
+ DB_INFO = PGInfo()
+ KEYCLOAK_INFO = KeycloakInfo()
+
+ class Config:
+ env_file = f"{base_dir}/.env"
+ env_file_encoding = "utf-8"
+
+
+class LocalSettings(Settings):
+ TESTING: bool = False
+ RELOAD: bool = False
+
+ DB_INFO = PGInfo(
+ DB_POOL_RECYCLE=900,
+ DB_ECHO=False,
+ SCHEMA="sitemng,users,meta,iag,ckan,board,analysis",
+ DB_URL=str(
+ PostgresDsn.build(
+ scheme="postgresql",
+ host="192.168.100.126",
+ port="25432",
+ user="dpmanager",
+ password="hello.dp12#$",
+ path="/dataportal",
+ )
+ ),
+ )
+
+ KEYCLOAK_INFO = KeycloakInfo(
+ keycloak_url="https://auth.bigdata-car.kr",
+ admin_username="admin",
+ admin_password="2021@katech",
+ realm="mobigen",
+ client_id="katech",
+ client_secret="ZWY7WDimS4rxzaXEfwEShYMMly00i8L0",
+ )
+
+
+class TestSettings(LocalSettings):
+ TESTING = True
+ RELOAD = True
+
+
+@lru_cache
+def get_settings() -> Settings:
+ env = os.getenv("APP_ENV", "prod")
+ print(env)
+ return {"local": LocalSettings(), "test": TestSettings(), "prod": ProdSettings()}[env]
+
+
+settings = get_settings()
+print(settings)
+
+log_config = {
+ "version": 1,
+ "disable_existing_loggers": False,
+ "formatters": {
+ "standard": {"format": "%(asctime)s %(levelname)s [%(filename)s:%(lineno)d] - %(message)s"},
+ },
+ "handlers": {
+ "console_handler": {
+ "class": "logging.StreamHandler",
+ "level": "DEBUG",
+ "formatter": "standard",
+ },
+ },
+ "root": {"level": "DEBUG", "handlers": ["console_handler"], "propagate": False},
+}
+
+if "prod" == os.getenv("APP_ENV", "prod"):
+ log_config["handlers"]["file_handler"] = {
+ "class": "logging.handlers.RotatingFileHandler",
+ "filename": os.path.join(base_dir, "log", "login.log"),
+ "mode": "a",
+ "maxBytes": 20000000,
+ "backupCount": 10,
+ "level": "INFO",
+ "formatter": "standard",
+ }
+ log_config["root"]["handlers"].append("file_handler")
+logging.config.dictConfig(log_config)
diff --git a/API_SERVICE/login_service/app/common/const.py b/API_SERVICE/login_service/app/common/const.py
new file mode 100644
index 00000000..4014c9cf
--- /dev/null
+++ b/API_SERVICE/login_service/app/common/const.py
@@ -0,0 +1,103 @@
+from typing import Dict
+
+
+SECRET_KEY = "09d25e094faa6ca2556c818166b7a9563b93f7099f6f0f4caa6cf63b88e8d3e7"
+ALGORITHM = "HS256"
+EXPIRE_DELTA = 1
+COOKIE_NAME = "user-katech-access-token"
+IRIS_COOKIE_NAME = "x-access-token"
+
+
+class LoginTable:
+ table_nm = "tb_user_info"
+ key_column = "user_id"
+
+ @staticmethod
+ def get_query_data(user_id: str) -> Dict:
+ return {
+ "table_nm": LoginTable.table_nm,
+ "where_info": [
+ {
+ "table_nm": LoginTable.table_nm,
+ "key": LoginTable.key_column,
+ "value": user_id,
+ "compare_op": "=",
+ "op": "",
+ }
+ ],
+ }
+
+
+class RegisterTable:
+ table_nm = "tb_user_info"
+
+ @staticmethod
+ def get_query_data(data: Dict) -> Dict:
+ return {"method": "INSERT", "table_nm": RegisterTable.table_nm, "data": data}
+
+ @staticmethod
+ def upsert_query_data(method: str, data: Dict) -> Dict:
+ method = method.upper()
+ queryDict = {"method": method, "table_nm": RegisterTable.table_nm, "data": data}
+ if method == "UPDATE":
+ queryDict["key"] = ["user_id"]
+ return queryDict
+
+
+class IrisInfoTable:
+ table_nm = "tb_iris_user_info"
+ key_column = "user_id"
+
+ def get_query_data(self, user_id: str) -> Dict:
+ return {
+ "table_nm": self.table_nm,
+ "where_info": [
+ {
+ "table_nm": self.table_nm,
+ "key": self.key_column,
+ "value": user_id,
+ "compare_op": "=",
+ "op": "",
+ }
+ ],
+ }
+
+ @staticmethod
+ def upsert_query_data(method: str, data: Dict) -> Dict:
+ method = method.upper()
+ queryDict = {"method": method, "table_nm": IrisInfoTable.table_nm, "data": data}
+ if method == "UPDATE":
+ queryDict["key"] = ["user_id"]
+
+ return queryDict
+
+
+class EmailAuthTable:
+ table_nm = "tb_email_athn_info"
+ key_column = "email"
+
+ @staticmethod
+ def get_query_data(user_id: str) -> Dict:
+ return {
+ "table_nm": EmailAuthTable.table_nm,
+ "where_info": [
+ {
+ "table_nm": EmailAuthTable.table_nm,
+ "key": EmailAuthTable.key_column,
+ "value": user_id,
+ "compare_op": "=",
+ "op": "",
+ }
+ ],
+ }
+
+ @staticmethod
+ def get_execute_query(data: Dict) -> Dict:
+ queryDict = {
+ "table_nm": EmailAuthTable.table_nm,
+ "key": [EmailAuthTable.key_column],
+ "method": "UPDATE",
+ "data": data,
+ }
+
+ return queryDict
diff --git a/API_SERVICE/login_service/Dockerfile b/API_SERVICE/login_service/app/database/__init__.py
similarity index 100%
rename from API_SERVICE/login_service/Dockerfile
rename to API_SERVICE/login_service/app/database/__init__.py
diff --git a/API_SERVICE/login_service/app/database/conn.py b/API_SERVICE/login_service/app/database/conn.py
new file mode 100644
index 00000000..a3b7b298
--- /dev/null
+++ b/API_SERVICE/login_service/app/database/conn.py
@@ -0,0 +1,7 @@
+from sqlalchemy.ext.automap import automap_base
+
+from libs.database.orm import SQLAlchemyConnector
+
+Base = automap_base()
+
+db = SQLAlchemyConnector(Base)
diff --git a/API_SERVICE/login_service/app/main.py b/API_SERVICE/login_service/app/main.py
new file mode 100644
index 00000000..4d2079d0
--- /dev/null
+++ b/API_SERVICE/login_service/app/main.py
@@ -0,0 +1,32 @@
+import uvicorn
+from fastapi import FastAPI
+from libs.auth.keycloak import keycloak
+from login_service.app.routes.v1 import auth as authV1
+from login_service.app.routes.v2 import auth as authV2
+from login_service.app.routes.v2 import iris_sso
+from login_service.app.common.config import settings
+from login_service.app.database.conn import db
+
+import logging
+
+logger = logging.getLogger()
+
+
+def create_app():
+ app_ = FastAPI()
+ print(settings.dict())
+ db.init_app(app_, **settings.dict())
+ keycloak.set_url(settings.KEYCLOAK_INFO.keycloak_url)
+
+ app_.include_router(authV1.router, prefix="/portal/api/common")
+ app_.include_router(authV2.router, prefix="/portal/api/common")
+ app_.include_router(iris_sso.router, prefix="/portal/api/common")
+
+ return app_
+
+
+app = create_app()
+
+
+if __name__ == "__main__":
+ uvicorn.run("main:app", host="0.0.0.0", port=8090, reload=True)
diff --git a/API_SERVICE/login_service/database/models.py b/API_SERVICE/login_service/app/routes/__init__.py
similarity index 100%
rename from API_SERVICE/login_service/database/models.py
rename to API_SERVICE/login_service/app/routes/__init__.py
diff --git a/API_SERVICE/meta_service/Dockerfile b/API_SERVICE/login_service/app/routes/v1/__init__.py
similarity index 100%
rename from API_SERVICE/meta_service/Dockerfile
rename to API_SERVICE/login_service/app/routes/v1/__init__.py
diff --git a/API_SERVICE/login_service/app/routes/v1/auth.py b/API_SERVICE/login_service/app/routes/v1/auth.py
new file mode 100644
index 00000000..393bbdf8
--- /dev/null
+++ b/API_SERVICE/login_service/app/routes/v1/auth.py
@@ -0,0 +1,295 @@
+import json
+import logging
+from ast import literal_eval
+from datetime import datetime
+from typing import Optional, Union
+
+import bcrypt
+from fastapi import APIRouter, Depends, Request
+from pydantic import BaseModel
+from starlette.responses import JSONResponse
+
+from libs.auth.keycloak import keycloak
+from libs.database.connector import Executor
+from login_service.app.common.config import settings
+from login_service.app.common.const import COOKIE_NAME, LoginTable, RegisterTable
+from login_service.app.database.conn import db
+
+logger = logging.getLogger()
+
+
+class CreateKeycloakFailError(Exception):
+ ...
+
+
+class LoginInfoWrap(BaseModel):
+ """
+    기존 파라미터 인터페이스와 맞추기 위해 wrap 후 유효 데이터를 삽입
+ dict를 그대로 사용할 수도 있으나, 개발 편의상 자동완성을 위해 LoginInfo 객체를 생성
+ """
+
+ class LoginInfo(BaseModel):
+ user_id: str
+ user_password: str
+ login_type: str
+
+ data: LoginInfo
+
+
+class RegisterInfoWrap(BaseModel):
+ """
+    기존 파라미터 인터페이스와 맞추기 위해 wrap 후 유효 데이터를 삽입
+ dict를 그대로 사용할 수도 있으나, 개발 편의상 자동완성을 위해 RegisterInfo 객체를 생성
+ """
+
+ class RegisterInfo(BaseModel):
+ user_id: str
+ user_password: str
+ login_type: Optional[str] = "MEMBER"
+ user_type: Optional[str] = "GENL"
+ user_sttus: Optional[str] = "SBSCRB"
+ user_nm: Optional[str]
+ email: Optional[str]
+ moblphon: Optional[str]
+ blng_org_cd: Optional[str] = None
+ blng_org_nm: Optional[str] = None
+ blng_org_desc: Optional[str] = None
+ service_terms_yn: Optional[str] = "Y"
+ pwd_fail_tms: Optional[int]
+ login_fail_date: Optional[datetime]
+ last_login_date: Optional[datetime]
+ reg_date: Union[datetime, str] = "NOW()"
+ amd_date: Union[datetime, str] = datetime.now()
+ user_uuid: Optional[str]
+ reg_user: Optional[str]
+ amd_user: Optional[str]
+ user_role: Optional[str]
+ user_normal: Optional[str]
+ adm_yn: Optional[str]
+
+ data: RegisterInfo
+
+
+router = APIRouter()
+
+
+@router.post("/user/commonRegister")
+async def register(params: RegisterInfoWrap, session: Executor = Depends(db.get_db)):
+ param = params.data
+ param.user_normal = param.user_password
+ param.user_password = bcrypt.hashpw(param.user_password.encode("utf-8"), bcrypt.gensalt()).decode(encoding="utf-8")
+ try:
+ logger.info(params)
+ row = session.query(**LoginTable.get_query_data(param.user_id)).first()
+ logger.info(f"row:: {row}")
+ if row:
+ return JSONResponse(status_code=200, content={"result": 1, "errorMessage": "Already registered"})
+
+ session.execute(auto_commit=False, **RegisterTable.get_query_data(param.dict()))
+
+ await create_keycloak_user(**param.dict())
+
+ session.commit()
+ return JSONResponse(status_code=200, content={"result": 1, "errorMessage": ""})
+ except Exception as e:
+ session.rollback()
+ logger.error(e, exc_info=True)
+ return JSONResponse(status_code=500, content={"result": 0, "errorMessage": str(e)})
+
+
+@router.post("/user/commonLogin")
+async def login(params: LoginInfoWrap, session: Executor = Depends(db.get_db)) -> JSONResponse:
+ """
+    keycloak 인증 후 토큰 발급
+ table data
+ {
+ 'user_id': 'swyang',
+ 'user_password': '$2b$12$eL47K7Pi5.Ee9GCTftZ1GuwFMO96jFltAuhnMvropsu/JtyzB26UO',
+ 'login_type': None,
+ 'user_type': None,
+ 'user_sttus': None,
+ 'user_nm': 'seok',
+ 'email': 'test@test.com',
+ 'moblphon': None,
+ 'blng_org_cd': None,
+ 'blng_org_nm': None,
+ 'blng_org_desc': None,
+ 'service_terms_yn': None,
+ 'pwd_fail_tms': None,
+ 'login_fail_date': None,
+ 'last_login_date': None,
+ 'reg_date': None,
+ 'amd_date': None,
+ 'user_uuid': None,
+ 'reg_user': None,
+ 'amd_user': None,
+ 'user_role': None,
+ 'user_normal': 'zxcv1234!',
+ 'adm_yn': None
+ }
+
+ Args:
+ params (LoginInfoWrap): _description_
+ session (Executor, optional): _description_. Defaults to Depends(db.get_db).
+
+ Returns:
+ JSONResponse: _description_
+ """
+ param = params.data
+ try:
+ row = session.query(**LoginTable.get_query_data(param.user_id)).first()
+ logger.debug(row)
+ # 보안 때문에
+ if param.login_type == "member":
+ check_pw = bcrypt.checkpw(param.user_password.encode("utf-8"), row["user_password"].encode("utf-8"))
+ else:
+ check_pw = True
+
+ if not row or not check_pw:
+ return JSONResponse(status_code=400, content={"result": 0, "errorMessage": "id or password not found"})
+
+ token = await get_normal_token(grant_type="password", username=param.user_id, password=param.user_password)
+ logger.info(f"token :: {token}")
+ if token["status_code"] == 401:
+ await create_keycloak_user(**row)
+ token = await get_normal_token(grant_type="password", username=param.user_id, password=param.user_password)
+
+ response = JSONResponse(status_code=200, content={"result": 1, "errorMessage": ""})
+ response.set_cookie(key=COOKIE_NAME, value=token)
+ return response
+ except Exception as e:
+ logger.error(e, exc_info=True)
+ logger.error(f"data :: {params}")
+ return JSONResponse(status_code=500, content={"result": 0, "errorMessage": str(e)})
+
+
+@router.get("/user/commonUserInfo")
+async def info(request: Request, session: Executor = Depends(db.get_db)):
+ """
+ {
+ "result": 1,
+ "errorMessage": "",
+ "data": {
+ "body": {
+ "user_id": "admin@test.com",
+ "email": "admin@test.com",
+ "login_type": "MEMBER",
+ "moblphon": "010-1111-1112",
+ "user_nm": "관리자",
+ "user_type": "GENL",
+ "user_role": "ROLE_USER|ROLE_ADMIN",
+ "user_uuid": "6d77d874-e613-480f-8e86-dba491c28167",
+ "blng_org_cd": "None",
+ "blng_org_nm": "None",
+ "blng_org_desc": "None",
+ "exp": 1689053022
+ }
+ }
+ }
+
+ Args:
+ request (Request): _description_
+ session (Executor, optional): _description_. Defaults to Depends(db.get_db).
+ """
+ token = request.cookies.get(COOKIE_NAME)
+ if not token:
+ msg = "TokenDoesNotExist"
+ logger.info(msg)
+ return JSONResponse(status_code=400, content={"result": 0, "errorMessage": msg})
+
+ token = literal_eval(token)
+ username = await username_from_token(token["data"]["access_token"])
+ # keycloak API가 token을 소문자로 저장, DB 조회 코드를 소문자로 변경
+ login_table = LoginTable.get_query_data(username)
+ login_table["where_info"][0]["compare_op"] = "ilike"
+ row = session.query(**login_table).first()
+ row.pop("user_password")
+
+ return JSONResponse(
+ status_code=200,
+ content={"result": 1, "errorMessage": "", "data": {"body": row}},
+ )
+
+
+@router.post("/user/commonLogout")
+async def logout():
+ response = JSONResponse(status_code=200, content={"result": 1, "errorMessage": ""})
+ response.delete_cookie(COOKIE_NAME, domain="bigdata-car.kr")
+ return response
+
+
+async def get_admin_token() -> None:
+ res = await keycloak.generate_admin_token(
+ username=settings.KEYCLOAK_INFO.admin_username,
+ password=settings.KEYCLOAK_INFO.admin_password,
+ grant_type="password",
+ )
+
+ return res.get("data").get("access_token")
+
+
+async def create_keycloak_user(password, **kwargs):
+ admin_token = await get_admin_token()
+ reg_data = {
+ "username": kwargs["user_id"],
+ "firstName": kwargs["user_nm"],
+ "email": kwargs["email"],
+ "emailVerified": True,
+ "enabled": True,
+ "credentials": [{"value": password}],
+ "attributes": json.dumps(kwargs, default=str),
+ }
+ res = await keycloak.create_user(token=admin_token, realm=settings.KEYCLOAK_INFO.realm, **reg_data)
+ logger.info(f"res :: {res}")
+ if res["status_code"] != 201:
+ raise CreateKeycloakFailError(f"CreateKeycloakFailError :: {res}")
+
+
+async def get_normal_token(**kwargs):
+ return await keycloak.generate_normal_token(
+ realm=settings.KEYCLOAK_INFO.realm,
+ client_id=settings.KEYCLOAK_INFO.client_id,
+ client_secret=settings.KEYCLOAK_INFO.client_secret,
+ grant_type=kwargs.pop("grant_type", "password"),
+ **kwargs,
+ )
+
+
+async def username_from_token(access_token: str):
+ res = await keycloak.user_info(
+ realm=settings.KEYCLOAK_INFO.realm,
+ token=access_token,
+ )
+ logger.info(f"token info res :: {res}")
+ return res["data"]["preferred_username"]
+
+
+async def delete_user(**kwargs):
+ """
+ keycloak delete api 호출
+ params:
+ user_id: str
+ """
+ admin_token = await get_admin_token()
+ res = await keycloak.delete_user(
+ token=admin_token, realm=settings.KEYCLOAK_INFO.realm, user_id=kwargs.get("user_id")
+ )
+ logger.info(f"delete res :: {res}")
+
+
+"""
+token
+{
+ 'status_code': 200,
+ 'data': {
+ 'access_token': '',
+ 'expires_in': 300,
+ 'refresh_expires_in': 1800,
+ 'refresh_token': '',
+ 'token_type': 'Bearer',
+ 'not-before-policy': 0,
+ 'session_state': 'e20bcf24-8066-40c8-8b13-652c74cb6b9b',
+ 'scope': 'profile email'
+ }
+}
+"""
diff --git a/API_SERVICE/meta_service/common/const.py b/API_SERVICE/login_service/app/routes/v2/__init__.py
similarity index 100%
rename from API_SERVICE/meta_service/common/const.py
rename to API_SERVICE/login_service/app/routes/v2/__init__.py
diff --git a/API_SERVICE/login_service/app/routes/v2/auth.py b/API_SERVICE/login_service/app/routes/v2/auth.py
new file mode 100644
index 00000000..d58e287c
--- /dev/null
+++ b/API_SERVICE/login_service/app/routes/v2/auth.py
@@ -0,0 +1,1064 @@
+from ast import literal_eval
+import logging
+from datetime import datetime
+from typing import Optional, Union
+
+import bcrypt
+import aiohttp
+from fastapi import APIRouter, Depends, Request
+from pydantic import BaseModel
+from starlette.responses import JSONResponse
+from libs.auth.keycloak import keycloak
+
+from libs.database.connector import Executor
+from login_service.app.common.config import settings
+from login_service.app.common.const import COOKIE_NAME, LoginTable, RegisterTable, EmailAuthTable
+from login_service.app.database.conn import db
+
+"""
+ Status Code :
+ 200 : OK
+ 201 : Created => create
+ 202 : Accepted
+ 204 : No Content => modify
+"""
+
+
+logger = logging.getLogger()
+
+
+class CreateKeycloakFailError(Exception):
+ ...
+
+
+class EmailAuthFail(Exception):
+ ...
+
+
+class AdminAuthFail(Exception):
+ ...
+
+
+class QueryInfoWrap(BaseModel):
+ """
+    기존 파라미터 인터페이스와 맞추기 위해 wrap 후 유효 데이터를 삽입
+ dict를 그대로 사용할 수도 있으나, 개발 편의상 자동완성을 위해 LoginInfo 객체를 생성
+ """
+
+ class QueryInfo(BaseModel):
+ query: str
+
+ data: QueryInfo
+
+
+class LoginInfoWrap(BaseModel):
+ class LoginInfo(BaseModel):
+ user_id: str
+ user_password: str
+
+ data: LoginInfo
+
+
+class LoginAuthInfoWrap(BaseModel):
+ class LoginAuthInfo(BaseModel):
+ code: str
+ scope: str
+ redirect_uri: str
+
+ data: LoginAuthInfo
+
+
+class RegisterInfoWrap(BaseModel):
+ class RegisterInfo(BaseModel):
+ user_id: str
+ user_password: Optional[str]
+ login_type: Optional[str] = "MEMBER"
+ user_type: Optional[str] = "GENL"
+ user_sttus: Optional[str] = "SBSCRB"
+ user_nm: Optional[str]
+ email: Optional[str]
+ moblphon: Optional[str]
+ blng_org_cd: Optional[str] = None
+ blng_org_nm: Optional[str] = None
+ blng_org_desc: Optional[str] = None
+ service_terms_yn: Optional[str] = "Y"
+ pwd_fail_tms: Optional[int]
+ login_fail_date: Optional[datetime]
+ last_login_date: Optional[datetime]
+ reg_date: Union[datetime, str] = "NOW()"
+ amd_date: Union[datetime, str] = datetime.now()
+ user_uuid: Optional[str]
+ reg_user: Optional[str]
+ amd_user: Optional[str]
+ user_role: Optional[str]
+ user_normal: Optional[str]
+ adm_yn: Optional[str]
+ enabled: Optional[str]
+ sub: Optional[str]
+ openstack_default_project: Optional[str]
+ openstack_user_domain: Optional[str]
+ limit_cpu: Optional[str]
+ limit_mem: Optional[str]
+ limit_app_count: Optional[str]
+ company: Optional[str]
+ companyImagePath: Optional[str]
+
+ data: RegisterInfo
+
+
+class RegisterSocialInfoWrap(BaseModel):
+ class RegisterSocialInfo(BaseModel):
+ social_type: str
+ social_id: Optional[str]
+ social_email: Optional[str]
+ access_token: Optional[str]
+
+ data: RegisterSocialInfo
+
+
+class ActivateInfoWrap(BaseModel):
+ class ActivateInfo(BaseModel):
+ user_id: str
+ athn_no: str
+
+ data: ActivateInfo
+
+
+class UserInfoWrap(BaseModel):
+ class UserInfo(BaseModel):
+ user_id: str
+
+ data: UserInfo
+
+
+class PasswordInfoWrap(BaseModel):
+ class PasswordInfo(BaseModel):
+ user_id: str
+ athn_no: str
+ new_password: str
+
+ data: PasswordInfo
+
+
+class PurchaseInfoWrap(BaseModel):
+ class PurchaseInfo(BaseModel):
+ data_id: str
+
+ data: PurchaseInfo
+
+
+class ClientInfoWrap(BaseModel):
+ class ClientInfo(BaseModel):
+ client_name: str
+
+ data: ClientInfo
+
+
+class ClientRoleWrap(BaseModel):
+ class ClientRole(BaseModel):
+ client_sub: str
+
+ data: ClientRole
+
+
+class ClientRoleMappingWrap(BaseModel):
+ class ClientRoleMapping(BaseModel):
+ user_id: str
+ client_sub: str
+ role_sub: str
+ role_name: str
+
+ data: ClientRoleMapping
+
+
+router = APIRouter()
+
+
+@router.post("/user/v2/commonLogout")
+async def logout():
+ response = JSONResponse(status_code=200, content={"result": 1, "errorMessage": ""})
+ response.delete_cookie(COOKIE_NAME)
+ # studio cookie 삭제
+ response.delete_cookie("x-access-token")
+ return response
+
+
+@router.post("/user/v2/commonLogoutKeyCloak")
+async def logout_keycloak(request: Request):
+ response = JSONResponse(status_code=200, content={"result": 1, "errorMessage": ""})
+ token = request.cookies.get(COOKIE_NAME)
+ if not token:
+ msg = "TokenDoesNotExist"
+ logger.info(msg)
+ return JSONResponse(status_code=400, content={"result": 0, "errorMessage": msg})
+
+ token = literal_eval(token)
+ refresh_token = token["data"]["refresh_token"]
+ logger.info(refresh_token)
+ res = await keycloak_logout(refresh_token=refresh_token)
+ logger.info(f"res :: {res}")
+
+ if res.get("status_code") != 204:
+ msg = res.get("data").get("error_description")
+ return JSONResponse(status_code=400, content={"result": 0, "errorMessage": msg})
+
+ response.delete_cookie(COOKIE_NAME)
+ # studio cookie 삭제
+ response.delete_cookie("x-access-token")
+ return response
+
+
+@router.get("/user/v2/commonUserInfo")
+async def info(request: Request, session: Executor = Depends(db.get_db)):
+ """
+ {
+ "result": 1,
+ "errorMessage": "",
+ "data": {
+ "body": {
+ "user_id": "admin@test.com",
+ "email": "admin@test.com",
+ "login_type": "MEMBER",
+ "moblphon": "010-1111-1112",
+ "user_nm": "관리자",
+ "user_type": "GENL",
+ "user_role": "ROLE_USER|ROLE_ADMIN",
+ "user_uuid": "6d77d874-e613-480f-8e86-dba491c28167",
+ "blng_org_cd": "None",
+ "blng_org_nm": "None",
+ "blng_org_desc": "None",
+ "exp": 1689053022
+ }
+ }
+ }
+
+ Args:
+ request (Request): _description_
+ session (Executor, optional): _description_. Defaults to Depends(db.get_db).
+ """
+ userInfo = await get_user_info_from_request(request)
+
+ if userInfo.get("status_code") == 200:
+ return JSONResponse(
+ status_code=200,
+ content={"result": 1, "errorMessage": "", "data": {"body": userInfo.get("data")}},
+ )
+ else:
+ return JSONResponse(
+ status_code=400,
+ content={"result": 0, "errorMessage": userInfo.get("data").get("error_description")},
+ )
+
+
+@router.get("/user/v2/commonUserUpsert")
+async def register(request: Request, session: Executor = Depends(db.get_db)):
+ """
+ {
+ "result": 1,
+ "errorMessage": "",
+ "data": {
+ "body": {
+ "user_id": "admin@test.com",
+ "email": "admin@test.com",
+ "login_type": "MEMBER",
+ "moblphon": "010-1111-1112",
+ "user_nm": "관리자",
+ "user_type": "GENL",
+ "user_role": "ROLE_USER|ROLE_ADMIN",
+ "user_uuid": "6d77d874-e613-480f-8e86-dba491c28167",
+ "blng_org_cd": "None",
+ "blng_org_nm": "None",
+ "blng_org_desc": "None",
+ "exp": 1689053022
+ }
+ }
+ }
+
+ Args:
+ request (Request): _description_
+ session (Executor, optional): _description_. Defaults to Depends(db.get_db).
+ """
+ userInfo = await get_user_info_from_request(request)
+ userData = userInfo.get("data")
+ userId = userData.get("user_id")
+
+ if userId is None:
+ msg = userInfo.get("data").get("error_description")
+ logger.info(msg)
+ return JSONResponse(status_code=400, content={"result": 0, "errorMessage": msg})
+
+ userParam = {
+ "keycloak_uuid": userData.get("sub"),
+ "user_uuid": userData.get("user_uuid"),
+ "user_id": userData.get("user_id"),
+ "user_nm": userData.get("user_nm"),
+ "email": userData.get("email"),
+ "moblphon": userData.get("moblphon"),
+ "user_type": userData.get("user_type"),
+ "login_type": userData.get("login_type"),
+ "user_role": userData.get("user_role"),
+ "adm_yn": userData.get("adm_yn"),
+ "user_sttus": userData.get("user_sttus"),
+ "blng_org_cd": userData.get("blng_org_cd"),
+ "blng_org_nm": userData.get("blng_org_nm"),
+ "blng_org_desc": userData.get("blng_org_desc"),
+ "service_terms_yn": userData.get("service_terms_yn"),
+ "reg_user": userData.get("reg_user"),
+ "reg_date": userData.get("reg_date"),
+ "amd_user": userData.get("amd_user"),
+ "amd_date": userData.get("amd_date"),
+ }
+
+ return await user_upsert(session, **userParam)
+
+
+@router.post("/user/v2/commonAdminUserUpsert")
+async def admin_register(request: Request, params: RegisterInfoWrap, session: Executor = Depends(db.get_db)):
+ param = params.data
+ try:
+ await check_admin(request)
+
+ user_data = {
+ "keycloak_uuid": param.sub,
+ "user_uuid": param.user_uuid,
+ "user_id": param.user_id,
+ "user_nm": param.user_nm,
+ "email": param.email,
+ "moblphon": param.moblphon,
+ "user_type": param.user_type,
+ "login_type": param.login_type,
+ "user_role": param.user_role,
+ "adm_yn": param.adm_yn,
+ "user_sttus": param.user_sttus,
+ "blng_org_cd": param.blng_org_cd,
+ "blng_org_nm": param.blng_org_nm,
+ "blng_org_desc": param.blng_org_desc,
+ "service_terms_yn": param.service_terms_yn,
+ "reg_user": param.reg_user,
+ "reg_date": param.reg_date.strftime("%Y-%m-%d %H:%M:%S"),
+ "amd_user": param.amd_user,
+ "amd_date": param.amd_date.strftime("%Y-%m-%d %H:%M:%S"),
+ }
+
+ return await user_upsert(session, **user_data)
+ except AdminAuthFail as ae:
+ return JSONResponse(status_code=400, content={"result": 0, "errorMessage": str(ae)})
+ except Exception as e:
+ session.rollback()
+ logger.error(e, exc_info=True)
+ return JSONResponse(status_code=500, content={"result": 0, "errorMessage": str(e)})
+
+
+@router.post("/user/v2/commonLogin")
+async def login(params: LoginInfoWrap) -> JSONResponse:
+ param = params.data
+
+ token = await get_normal_token(grant_type="password", username=param.user_id, password=param.user_password)
+ logger.info(f"token :: {token}")
+
+ if token["status_code"] == 200:
+ response = JSONResponse(status_code=200, content={"result": 1, "errorMessage": ""})
+ token["create_time"] = datetime.now().strftime("%s")
+ response.set_cookie(key=COOKIE_NAME, value=token)
+ return response
+ else:
+ return JSONResponse(
+ status_code=400,
+ content={"result": 0, "errorMessage": token["data"]["error_description"]},
+ )
+
+
+@router.post("/user/v2/commonLoginAuth")
+async def loginAuth(params: LoginAuthInfoWrap) -> JSONResponse:
+ param = params.data
+
+ token = await get_normal_token(
+ grant_type="authorization_code", code=param.code, scope=param.scope, redirect_uri=param.redirect_uri
+ )
+ logger.info(f"token :: {token}")
+
+ if token["status_code"] == 200:
+ response = JSONResponse(status_code=200, content={"result": 1, "errorMessage": ""})
+ token["create_time"] = datetime.now().strftime("%s")
+ response.set_cookie(key=COOKIE_NAME, value=token)
+ return response
+ else:
+ return JSONResponse(
+ status_code=400,
+ content={"result": 0, "errorMessage": token["data"]["error_description"]},
+ )
+
+
+@router.post("/user/v2/commonLoginSocial")
+async def loginSocial(params: RegisterSocialInfoWrap):
+ param = params.data
+
+ token = await get_social_token(**param.dict())
+ if token["status_code"] == 200:
+ token["create_time"] = datetime.now().strftime("%s")
+ response = JSONResponse(status_code=200, content={"result": 1, "errorMessage": ""})
+ response.set_cookie(key=COOKIE_NAME, value=token)
+ return response
+ else:
+ return JSONResponse(
+ status_code=400,
+ content={"result": 0, "errorMessage": token["data"]["error_description"]},
+ )
+
+
+@router.post("/user/v2/commonSocialLink")
+async def socialLink(params: RegisterSocialInfoWrap):
+ param = params.data
+ social_email = param.social_email
+
+ admin_token = await get_admin_token()
+ res = await keycloak.get_query(
+ token=admin_token, realm=settings.KEYCLOAK_INFO.realm, query=f"username={social_email}&exact=true"
+ )
+
+ userList = res.get("data")
+ if len(userList) == 0:
+ return JSONResponse(status_code=400, content={"result": 0, "errorMessage": "Invalid User!!"})
+
+ logger.info(f"res :: {res}")
+ user_info = userList[0]
+ sub = user_info.get("id")
+
+ token = await keycloak.social_link(token=admin_token, realm=settings.KEYCLOAK_INFO.realm, sub=sub, **param.dict())
+ logger.info(f"token :: {token}")
+
+ if token["status_code"] == 204:
+ response = JSONResponse(status_code=200, content={"result": 1, "errorMessage": ""})
+ return response
+ else:
+ return JSONResponse(
+ status_code=400,
+ content={"result": 0, "errorMessage": token["data"]["error_description"]},
+ )
+
+
+@router.post("/user/v2/commonLoginDB")
+async def loginDB(params: LoginInfoWrap, session: Executor = Depends(db.get_db)) -> JSONResponse:
+ param = params.data
+
+ check_pw = True
+ try:
+ row = session.query(**LoginTable.get_query_data(param.user_id)).first()
+ check_pw = bcrypt.checkpw(param.user_password.encode("utf-8"), row["user_password"].encode("utf-8"))
+
+ if row and check_pw:
+ return JSONResponse(status_code=200, content={"result": 1, "errorMessage": "", "data": {"body": row}})
+ else:
+ return JSONResponse(status_code=200, content={"result": 0, "errorMessage": "no user"})
+ except Exception as e:
+ logger.error(e, exc_info=True)
+ logger.error(f"data :: {params}")
+ return JSONResponse(status_code=500, content={"result": 0, "errorMessage": str(e)})
+
+
@router.post("/user/v2/commonRegisterNormal")
async def registerNormal(params: RegisterInfoWrap, session: Executor = Depends(db.get_db)):
    """Create a normal (non-social) Keycloak user from the posted registration data."""
    try:
        await create_keycloak_user(**params.data.dict())
    except Exception as e:
        session.rollback()
        logger.error(e, exc_info=True)
        return JSONResponse(status_code=500, content={"result": 0, "errorMessage": str(e)})
    return JSONResponse(status_code=200, content={"result": 1, "errorMessage": ""})
+
+
@router.post("/user/v2/commonActivateUser")
async def activateUser(params: ActivateInfoWrap, session: Executor = Depends(db.get_db)):
    """Verify the e-mailed auth code, then flip the Keycloak account to enabled."""
    data = params.data
    logger.info(data)
    try:
        await check_email_auth(data.user_id, data.athn_no, session)
        # Only "enabled" is toggled; every other attribute is preserved by alter_user_info.
        return await alter_user_info(data.user_id, "SBSCRB", enabled="true")
    except Exception as e:
        session.rollback()
        logger.error(e, exc_info=True)
        return JSONResponse(status_code=500, content={"result": 0, "errorMessage": str(e)})
+
+
@router.post("/user/v2/commonKeyCloakQuery")
async def getCount(params: QueryInfoWrap, session: Executor = Depends(db.get_db)):
    """Run a Keycloak user query and return only the number of matches."""
    try:
        res = await get_query_keycloak(params.data.query)
        logger.info(res)
        count = len(res.get("data"))
    except Exception as e:
        session.rollback()
        logger.error(e, exc_info=True)
        return JSONResponse(status_code=500, content={"result": 0, "errorMessage": str(e)})
    return JSONResponse(status_code=200, content={"result": 1, "errorMessage": "", "data": count})
+
+
@router.post("/user/v2/commonUserModify")
async def modify(request: Request, params: RegisterInfoWrap):
    """Let the logged-in user (resolved from the session cookie) modify their own account.

    BUG FIX: unlike its sibling handlers this route had no try/except, and when
    the cookie was missing get_user_info_from_request returns a JSONResponse,
    on which .get() raised an uncaught AttributeError (opaque 500).
    """
    try:
        userInfo = await get_user_info_from_request(request)
        # Missing-cookie path: the helper already built the error response.
        if isinstance(userInfo, JSONResponse):
            return userInfo

        userInfo = userInfo.get("data")
        userId = userInfo.get("preferred_username") if userInfo else None
        if userId is None:
            return JSONResponse(status_code=400, content={"result": 0, "errorMessage": "Invalid User"})

        param = params.data
        param.sub = userInfo.get("sub")
        return await modify_keycloak_user(**param.dict())
    except Exception as e:
        logger.error(e, exc_info=True)
        return JSONResponse(status_code=500, content={"result": 0, "errorMessage": str(e)})
+
+
@router.post("/user/v2/commonAdminGetUserInfo")
async def adminGetUser(request: Request, params: UserInfoWrap):
    """Admin-only lookup of a Keycloak user by exact username."""
    userName = params.data.user_id
    try:
        await check_admin(request)

        admin_token = await get_admin_token()
        res = await keycloak.get_query(
            token=admin_token,
            realm=settings.KEYCLOAK_INFO.realm,
            query=f"username={userName}&exact=true",
        )

        userList = res.get("data")
        if len(userList) == 0:
            return JSONResponse(status_code=400, content={"result": 0, "errorMessage": "Invalid User!!"})
        return JSONResponse(status_code=200, content={"result": 1, "errorMessage": "", "data": userList})
    except Exception as e:
        logger.error(e, exc_info=True)
        return JSONResponse(status_code=500, content={"result": 0, "errorMessage": str(e)})
+
+
@router.post("/user/v2/commonAdminModifyUser")
async def adminModifyUser(request: Request, params: RegisterInfoWrap):
    """Admin-only variant of user modification: any account's data may be altered."""
    try:
        await check_admin(request)
        return await modify_keycloak_user(**params.data.dict())
    except Exception as e:
        logger.error(e, exc_info=True)
        return JSONResponse(status_code=500, content={"result": 0, "errorMessage": str(e)})
+
+
@router.get("/user/v2/commonCheckSocialType")
async def adminCheckSocialtype(request: Request):
    """Report the identity-provider links (social login types) of the current user."""
    try:
        admin_token = await get_admin_token()
        info = await get_user_info_from_request(request)
        info = info.get("data")
        if info is None:
            return JSONResponse(status_code=400, content={"result": 0, "errorMessage": "Invalid User!!"})

        logger.info(f"userInfo :: {info}")
        res = await keycloak.check_idp(token=admin_token, realm=settings.KEYCLOAK_INFO.realm, sub=info.get("sub"))
        return JSONResponse(status_code=200, content={"result": 1, "errorMessage": "", "data": res.get("data")})
    except Exception as e:
        logger.error(e, exc_info=True)
        return JSONResponse(status_code=500, content={"result": 0, "errorMessage": str(e)})
+
+
@router.post("/user/v2/commonNewPassword")
async def userNewPassword(params: PasswordInfoWrap, session: Executor = Depends(db.get_db)):
    """Reset a user's password after validating the e-mailed auth code."""
    data = params.data
    logger.info(data)
    try:
        await check_email_auth(data.user_id, data.athn_no, session)
        # Only the credentials entry is replaced; other account data is untouched.
        return await alter_user_info(data.user_id, credentials=[{"value": data.new_password}])
    except Exception as e:
        session.rollback()
        logger.error(e, exc_info=True)
        return JSONResponse(status_code=500, content={"result": 0, "errorMessage": str(e)})
+
+
@router.post("/user/v2/checkPurchase")
async def checkPurchase(params: PurchaseInfoWrap, request: Request):
    """Ask the market service whether the current user has purchased *data_id*.

    Returns {"status_code": <upstream status>, "data": "Y"|"N"}; "N" also
    covers upstream responses that are not valid JSON.
    """
    data_id = params.data.data_id
    token = request.cookies.get(COOKIE_NAME)

    if not token:
        msg = "TokenDoesNotExist"
        logger.info(msg)
        return JSONResponse(status_code=400, content={"result": 0, "errorMessage": msg})

    # Cookie stores a stringified dict; literal_eval restores it.
    token = literal_eval(token)
    access_token = token["data"]["access_token"]
    api_url = f"https://market.bigdata-car.kr/api/v1/purchase-status/{data_id}"
    headers = {"Content-Type": "application/json", "Authorization": "Bearer " + access_token}
    async with aiohttp.ClientSession() as session:
        async with session.request(url=api_url, method="GET", headers=headers) as response:
            try:
                ret = await response.json()
            except Exception:
                # BUG FIX: a non-JSON body used to be kept as raw bytes and the
                # subsequent .get() raised AttributeError; treat it as "not purchased".
                logger.info(f"non-JSON purchase-status body :: {await response.read()}")
                ret = {}
            # Expected upstream shape: {'purchaseStatus': bool}
            purchased = ret.get("purchaseStatus") if isinstance(ret, dict) else None
            return {"status_code": response.status, "data": "Y" if purchased else "N"}
+
+
@router.post("/user/v2/checkClientInfo")
async def checkClientInfo(params: ClientInfoWrap):
    """Look up a Keycloak client by its clientId and return its records."""
    client_name = params.data.client_name
    try:
        admin_token = await get_admin_token()
        res = await keycloak.check_client_id(token=admin_token, realm=settings.KEYCLOAK_INFO.realm)
        client_info = [item for item in res.get("data") if item["clientId"] == client_name]
        logger.info(f"client_info :: {client_info}")
        if len(client_info) == 0:
            return JSONResponse(status_code=400, content={"result": 0, "errorMessage": "Invalid Client Name!!"})
        return JSONResponse(status_code=200, content={"result": 1, "data": client_info})
    except Exception as e:
        logger.error(e, exc_info=True)
        return JSONResponse(status_code=500, content={"result": 0, "errorMessage": str(e)})
+
+
@router.post("/user/v2/checkClientRole")
async def checkClientRole(params: ClientRoleWrap):
    """Return the role list defined on the Keycloak client identified by client_sub."""
    client_sub = params.data.client_sub
    try:
        admin_token = await get_admin_token()
        res = await keycloak.check_client_role(
            token=admin_token,
            realm=settings.KEYCLOAK_INFO.realm,
            client_sub=client_sub,
        )
        roles = res.get("data")
        logger.info(f"client_role :: {roles}")
        return JSONResponse(status_code=200, content={"result": 1, "data": roles})
    except Exception as e:
        logger.error(e, exc_info=True)
        return JSONResponse(status_code=500, content={"result": 0, "errorMessage": str(e)})
+
+
@router.post("/user/v2/setRoleMapping")
async def setRoleMapping(params: ClientRoleMappingWrap):
    """Map a client role onto the Keycloak user identified by username."""
    data = params.data

    try:
        admin_token = await get_admin_token()
        res = await keycloak.get_query(
            token=admin_token, realm=settings.KEYCLOAK_INFO.realm, query=f"username={data.user_id}&exact=true"
        )

        userList = res.get("data")
        if len(userList) == 0:
            return JSONResponse(status_code=400, content={"result": 0, "errorMessage": "Invalid User!!"})

        resToken = await keycloak.set_client_role_mapping(
            token=admin_token,
            realm=settings.KEYCLOAK_INFO.realm,
            user_sub=userList[0].get("id"),
            client_sub=data.client_sub,
            role_sub=data.role_sub,
            role_name=data.role_name,
        )
        if resToken["status_code"] != 204:
            return JSONResponse(status_code=400, content={"result": 0, "errorMessage": resToken["data"]})
        return JSONResponse(status_code=200, content={"result": 1, "errorMessage": ""})
    except Exception as e:
        logger.error(e, exc_info=True)
        return JSONResponse(status_code=500, content={"result": 0, "errorMessage": str(e)})
+
+
@router.get("/user/v2/getUyuniRole")
async def getUyuniRole(request: Request):
    """Introspect the access token from the session cookie and return its payload."""
    token = request.cookies.get(COOKIE_NAME)

    if not token:
        msg = "TokenDoesNotExist"
        logger.info(msg)
        return JSONResponse(status_code=400, content={"result": 0, "errorMessage": msg})

    try:
        # Cookie stores a stringified dict; literal_eval restores it.
        access_token = literal_eval(token)["data"]["access_token"]
        res = await get_token_info(token=access_token)
        if res["status_code"] != 200:
            return JSONResponse(
                status_code=400,
                content={"result": 0, "errorMessage": res["data"]["error_description"]},
            )
        return JSONResponse(status_code=200, content={"result": 1, "data": res["data"]})
    except Exception as e:
        logger.error(e, exc_info=True)
        return JSONResponse(status_code=500, content={"result": 0, "errorMessage": str(e)})
+
+
async def check_admin(request: Request):
    """Raise unless the request's session cookie belongs to an admin user.

    :raises AdminAuthFail: when no valid token is present or the user's
        pipe-separated user_role list lacks "ROLE_ADMIN".
    """
    resToken = await get_user_info_from_request(request)
    logger.info(resToken)
    # BUG FIX: get_user_info_from_request returns a JSONResponse when the
    # cookie is missing; subscripting it raised TypeError instead of the
    # intended auth failure.
    if not isinstance(resToken, dict) or resToken["status_code"] != 200:
        raise AdminAuthFail("Required Admin Role")

    userInfo = resToken.get("data")
    # user_role may be absent — treat that as "no roles" rather than crashing.
    roles = userInfo.get("user_role") or ""
    userRoleList = [val.strip() for val in roles.split("|")]

    if "ROLE_ADMIN" not in userRoleList:
        raise AdminAuthFail("Required Admin Role")
+
+
async def user_upsert(session: Executor, **kwargs):
    """Insert the user row, or update it when a row for user_id already exists."""
    existing = session.query(**LoginTable.get_query_data(kwargs.get("user_id"))).first()
    method = "UPDATE" if existing else "INSERT"
    try:
        logger.info(kwargs)
        session.execute(auto_commit=False, **RegisterTable.upsert_query_data(method, kwargs))
        session.commit()
    except Exception as e:
        session.rollback()
        logger.error(e, exc_info=True)
        return JSONResponse(status_code=500, content={"result": 0, "errorMessage": str(e)})
    return JSONResponse(status_code=200, content={"result": 1, "errorMessage": ""})
+
+
def _attr_value(attributes, *keys, default=""):
    """Return the first present attribute value among *keys*, unwrapping
    Keycloak's single-element-list representation; *default* when absent."""
    for key in keys:
        value = attributes.get(key)
        if value is not None:
            return value[0] if isinstance(value, list) and value else value
    return default


async def alter_user_info(user_id: str, user_sttus: str = None, **kwargs):
    """Update one Keycloak user (looked up by username) via the admin API.

    Keycloak's user-update endpoint replaces the attribute map wholesale, so
    every stored attribute is read back and re-sent alongside the fields in
    **kwargs; ``user_sttus`` (when given) overrides the stored status.
    Stored attribute values arrive as single-element lists, e.g.
    ``{'user_sttus': ['SBSCRB'], 'user_role': ['ROLE_USER'], ...}``.

    Returns a JSONResponse describing success (result=1) or failure.
    """
    try:
        admin_token = await get_admin_token()
        res = await keycloak.get_query(token=admin_token, realm=settings.KEYCLOAK_INFO.realm, query="")
        userList = res.get("data")
        matches = [item for item in userList if item["username"] == user_id]
        if len(matches) == 0:
            return JSONResponse(status_code=400, content={"result": 0, "errorMessage": "Invalid User!!"})
        user_info = matches[0]
        attributes = user_info.get("attributes")
        sub = user_info.get("id")

        # Keep the stored status unless the caller supplied an override.
        if user_sttus is None:
            user_sttus = attributes.get("user_sttus")[0]

        # BUG FIX: the stored keys are hyphenated ("openstack-default-project",
        # "openstack-user-domain") — see the write path below and the attribute
        # dump above — but the old code read the underscore variants, so the
        # existing values were always discarded and the defaults re-applied.
        # Read the hyphenated key first, falling back to the underscore
        # spelling for any rows written before this fix.
        kwargs = {
            **kwargs,
            "attributes": {
                "login_type": attributes.get("login_type")[0],
                "reg_user": attributes.get("reg_user")[0],
                "user_nm": attributes.get("user_nm")[0],
                "user_sttus": user_sttus,
                "adm_yn": attributes.get("adm_yn")[0],
                "user_role": attributes.get("user_role")[0],
                "reg_date": attributes.get("reg_date")[0],
                "user_uuid": attributes.get("user_uuid")[0],
                "amd_date": attributes.get("amd_date")[0],
                "user_type": attributes.get("user_type")[0],
                "user_id": attributes.get("user_id")[0],
                "moblphon": attributes.get("moblphon")[0],
                "amd_user": attributes.get("amd_user")[0],
                "service_terms_yn": attributes.get("service_terms_yn")[0],
                "openstack-default-project": _attr_value(
                    attributes, "openstack-default-project", "openstack_default_project", default=""
                ),
                "openstack-user-domain": _attr_value(
                    attributes, "openstack-user-domain", "openstack_user_domain", default="Default"
                ),
                "limit_cpu": _attr_value(attributes, "limit_cpu", default="2"),
                "limit_mem": _attr_value(attributes, "limit_mem", default="2048"),
                "limit_app_count": _attr_value(attributes, "limit_app_count", default="5"),
                "company": _attr_value(attributes, "company", default=""),
                "companyImagePath": _attr_value(attributes, "companyImagePath", default=""),
            },
        }

        resToken = await keycloak.alter_user(token=admin_token, realm=settings.KEYCLOAK_INFO.realm, sub=sub, **kwargs)
        logger.info(f"resToken = {resToken}")
        if resToken["status_code"] == 204:
            return JSONResponse(status_code=200, content={"result": 1, "errorMessage": ""})
        return JSONResponse(
            status_code=400, content={"result": 0, "errorMessage": resToken["data"]["error_description"]}
        )
    except Exception as e:
        logger.error(e, exc_info=True)
        return JSONResponse(status_code=500, content={"result": 0, "errorMessage": str(e)})
+
+
async def check_email_auth(user_id: str, athn_no: str, session: Executor):
    """Validate the e-mailed auth code for *user_id* and mark it confirmed.

    :raises EmailAuthFail: when no auth row exists for the user, or the
        submitted code does not match the stored one.
    """
    email_info = session.query(**EmailAuthTable.get_query_data(user_id)).first()
    # BUG FIX: a missing row used to raise TypeError (subscripting None);
    # treat it as a failed authentication instead.
    if email_info is None or email_info["athn_no"] != athn_no:
        raise EmailAuthFail("EmailAuthFail")
    email_info["athn_yn"] = "Y"
    email_info["athn_date"] = "NOW()"
    session.execute(auto_commit=False, **EmailAuthTable.get_execute_query(email_info))
+
+
async def get_user_info_from_request(request: Request):
    """Resolve the Keycloak userinfo for the session cookie on *request*.

    NOTE(review): returns a JSONResponse when the cookie is absent, but the
    raw keycloak payload (a dict) otherwise — callers must handle both shapes.
    """
    raw_cookie = request.cookies.get(COOKIE_NAME)

    if not raw_cookie:
        msg = "TokenDoesNotExist"
        logger.info(msg)
        return JSONResponse(status_code=400, content={"result": 0, "errorMessage": msg})

    # Cookie stores a stringified dict; literal_eval restores it.
    token = literal_eval(raw_cookie)
    return await keycloak.user_info(token=token["data"]["access_token"], realm=settings.KEYCLOAK_INFO.realm)
+
+
async def get_query_keycloak(query):
    """Run an admin-level user query against Keycloak.

    :raises CreateKeycloakFailError: when Keycloak does not answer 200.
    """
    res = await keycloak.get_query(
        token=await get_admin_token(),
        realm=settings.KEYCLOAK_INFO.realm,
        query=query,
    )
    logger.info(f"res :: {res}")
    if res["status_code"] == 200:
        return res
    raise CreateKeycloakFailError(f"CreateKeycloakFailError :: {res}")
+
+
async def modify_keycloak_user(**kwargs):
    """Push a full user update to Keycloak for the user id in kwargs["sub"].

    firstName / email / credentials are only included when the corresponding
    kwargs value is present; everything under "attributes" is always sent in
    full because Keycloak replaces the attribute map wholesale.
    Returns a JSONResponse describing success (result=1) or failure.
    """
    admin_token = await get_admin_token()

    # Attributes that fall back to a fixed default when not supplied.
    defaults = {
        "openstack_default_project": "",
        "openstack_user_domain": "Default",
        "limit_cpu": "2",
        "limit_mem": "2048",
        "limit_app_count": "5",
        "company": "",
        "companyImagePath": "",
    }
    filled = {key: default if kwargs.get(key) is None else kwargs.get(key) for key, default in defaults.items()}

    def _fmt(value):
        # BUG FIX: reg_date/amd_date may be absent, and calling .strftime on
        # None raised AttributeError before any request was sent; pass None
        # through unchanged instead.
        return value.strftime("%Y-%m-%d %H:%M:%S") if value is not None else None

    reg_data = {
        # Only sent when a value exists (trimmed below).
        "firstName": kwargs.get("user_nm"),
        "email": kwargs.get("email"),
        "credentials": [{"value": kwargs.get("user_password")}],
        "emailVerified": True,  # always true
        "enabled": kwargs.get("enabled"),
        # Always sent in full, even when values are missing.
        "attributes": {
            "user_uuid": kwargs.get("user_uuid"),
            "user_id": kwargs.get("user_id"),
            "user_nm": kwargs.get("user_nm"),
            "moblphon": kwargs.get("moblphon"),
            "user_type": kwargs.get("user_type"),
            "login_type": kwargs.get("login_type"),
            "user_role": kwargs.get("user_role"),
            "adm_yn": kwargs.get("adm_yn"),
            "user_sttus": kwargs.get("user_sttus"),
            "blng_org_cd": kwargs.get("blng_org_cd"),
            "blng_org_nm": kwargs.get("blng_org_nm"),
            "blng_org_desc": kwargs.get("blng_org_desc"),
            "service_terms_yn": kwargs.get("service_terms_yn"),
            "reg_user": kwargs.get("reg_user"),
            "reg_date": _fmt(kwargs.get("reg_date")),
            "amd_user": kwargs.get("amd_user"),
            "amd_date": _fmt(kwargs.get("amd_date")),
            "openstack-default-project": filled["openstack_default_project"],
            "openstack-user-domain": filled["openstack_user_domain"],
            "limit_cpu": filled["limit_cpu"],
            "limit_mem": filled["limit_mem"],
            "limit_app_count": filled["limit_app_count"],
            "company": filled["company"],
            "companyImagePath": filled["companyImagePath"],
        },
    }

    # Optional top-level fields: drop them when no value was provided.
    for source_key, target_key in (("user_nm", "firstName"), ("email", "email"), ("user_password", "credentials")):
        if kwargs.get(source_key) is None:
            del reg_data[target_key]

    try:
        resToken = await keycloak.alter_user(
            token=admin_token, realm=settings.KEYCLOAK_INFO.realm, sub=kwargs.get("sub"), **reg_data
        )
        logger.info(f"resToken :: {resToken}")
        if resToken["status_code"] == 204:
            return JSONResponse(status_code=200, content={"result": 1, "errorMessage": ""})
        return JSONResponse(
            status_code=400,
            content={"result": 0, "errorMessage": "Invalid User"},
        )
    except Exception as e:
        logger.error(e, exc_info=True)
        return JSONResponse(status_code=500, content={"result": 0, "errorMessage": str(e)})
+
+
async def create_keycloak_user(**kwargs):
    """Create a Keycloak account, pre-verified and enabled, carrying the portal
    profile fields as custom attributes.

    :raises CreateKeycloakFailError: when Keycloak does not answer 201.
    """
    admin_token = await get_admin_token()

    # Profile fields copied into the attribute map as-is.
    passthrough_keys = (
        "user_uuid",
        "user_id",
        "user_nm",
        "moblphon",
        "user_type",
        "login_type",
        "user_role",
        "adm_yn",
        "user_sttus",
        "blng_org_cd",
        "blng_org_nm",
        "blng_org_desc",
        "service_terms_yn",
        "reg_user",
    )
    attributes = {key: kwargs.get(key) for key in passthrough_keys}
    attributes["reg_date"] = kwargs.get("reg_date").strftime("%Y-%m-%d %H:%M:%S")
    attributes["amd_user"] = kwargs.get("amd_user")
    attributes["amd_date"] = kwargs.get("amd_date").strftime("%Y-%m-%d %H:%M:%S")
    # Openstack/resource-limit attributes start from fixed defaults for new accounts.
    attributes.update(
        {
            "openstack-default-project": "",
            "openstack-user-domain": "Default",
            "limit_cpu": "2",
            "limit_mem": "2048",
            "limit_app_count": "5",
            "company": "",
            "companyImagePath": "",
        }
    )

    reg_data = {
        "username": kwargs.get("user_id"),
        "firstName": kwargs.get("user_nm"),  # only meaningful when a value exists
        "email": kwargs.get("email"),  # only meaningful when a value exists
        "credentials": [{"value": kwargs.get("user_password")}],
        "emailVerified": True,  # always true
        "enabled": True,  # always true
        "attributes": attributes,
    }
    res = await keycloak.create_user(token=admin_token, realm=settings.KEYCLOAK_INFO.realm, **reg_data)
    logger.info(f"res :: {res}")
    if res["status_code"] != 201:
        raise CreateKeycloakFailError(f"CreateKeycloakFailError :: {res}")
+
+
async def get_admin_token() -> str:
    """Fetch a fresh Keycloak admin access token via the password grant.

    BUG FIX: the return annotation said ``None`` although the function
    returns the access-token string from the admin token response.
    """
    res = await keycloak.generate_admin_token(
        username=settings.KEYCLOAK_INFO.admin_username,
        password=settings.KEYCLOAK_INFO.admin_password,
        grant_type="password",
    )

    return res.get("data").get("access_token")
+
+
async def get_normal_token(**kwargs):
    """Obtain a user-level token; grant_type defaults to "password"."""
    grant_type = kwargs.pop("grant_type", "password")
    return await keycloak.generate_normal_token(
        realm=settings.KEYCLOAK_INFO.realm,
        client_id=settings.KEYCLOAK_INFO.client_id,
        client_secret=settings.KEYCLOAK_INFO.client_secret,
        grant_type=grant_type,
        **kwargs,
    )
+
+
async def get_social_token(**kwargs):
    """Exchange a social provider's access token for a Keycloak token
    (OAuth token-exchange grant)."""
    exchange = {
        "requested_token_type": "urn:ietf:params:oauth:token-type:refresh_token",
        "subject_issuer": kwargs.get("social_type"),
        "subject_token": kwargs.get("access_token"),
        "grant_type": "urn:ietf:params:oauth:grant-type:token-exchange",
    }
    return await keycloak.generate_normal_token(
        realm=settings.KEYCLOAK_INFO.realm,
        client_id=settings.KEYCLOAK_INFO.client_id,
        client_secret=settings.KEYCLOAK_INFO.client_secret,
        **exchange,
    )
+
+
async def keycloak_logout(**kwargs):
    """Invalidate the session behind *refresh_token* on the Keycloak side."""
    info = settings.KEYCLOAK_INFO
    return await keycloak.logout(
        realm=info.realm,
        refresh_token=kwargs.get("refresh_token"),
        client_id=info.client_id,
        client_secret=info.client_secret,
    )
+
+
async def get_token_info(**kwargs):
    """Introspect *token* against the configured Keycloak client."""
    info = settings.KEYCLOAK_INFO
    return await keycloak.token_info(
        realm=info.realm,
        token=kwargs.get("token"),
        client_id=info.client_id,
        client_secret=info.client_secret,
    )
diff --git a/API_SERVICE/login_service/app/routes/v2/iris_sso.py b/API_SERVICE/login_service/app/routes/v2/iris_sso.py
new file mode 100644
index 00000000..c2069299
--- /dev/null
+++ b/API_SERVICE/login_service/app/routes/v2/iris_sso.py
@@ -0,0 +1,146 @@
+import json
+import logging
+import random
+import string
+import sys
+import traceback
+from ast import literal_eval
+
+import requests
+from fastapi import APIRouter, Depends, Request
+from starlette.responses import JSONResponse
+
+from libs.auth.keycloak import keycloak
+from libs.database.connector import Executor
+from login_service.app.common.config import settings
+from login_service.app.common.const import COOKIE_NAME, LoginTable, IrisInfoTable
+from login_service.app.database.conn import db
+
+logger = logging.getLogger()
+router = APIRouter()
+
+base_url = "https://b-iris.mobigen.com"
+# base_url = "http://studio.bigdata-car.kr"
+
+
def get_exception_info():
    """Log the active exception with a formatted traceback; return its class name."""
    ex_type, ex_value, ex_traceback = sys.exc_info()
    trace_log = "\n".join(str(frame) for frame in traceback.extract_tb(ex_traceback))
    logger.error(
        f"\n- Exception Type : {ex_type}\n- Exception Message : {str(ex_value).strip()}\n- Exception Log : \n{trace_log}"
    )
    return ex_type.__name__
+
+
def get_token(iris_info, header):
    """POST the stored IRIS credentials to /authenticate and return the JSON reply.

    *iris_info* is a one-element list of dicts holding iris_id / iris_pw.
    """
    credentials = {"userId": iris_info[0]["iris_id"], "userPass": iris_info[0]["iris_pw"]}
    res = requests.post(f"{base_url}/authenticate", data=json.dumps(credentials), verify=False, headers=header)
    return res.json()
+
+
def get_random_str(is_num: bool) -> str:
    """Return a random 5-character string.

    :param is_num: True -> 5 random decimal digits;
        False -> 5 random lowercase ASCII letters.
    :return: the generated 5-character string.

    BUG FIXES vs. the original:
    - the docstring claimed 10 digits while the code always produced 5;
    - ``random.randrange(0, 9)`` excludes its stop value, so the digit 9
      could never appear; drawing from ``string.digits`` covers all ten.
    """
    pool = string.digits if is_num else string.ascii_lowercase
    return "".join(random.choice(pool) for _ in range(5))
+
+
@router.get("/user/v2/ConnectIRIS")
async def api(request: Request, session: Executor = Depends(db.get_db)) -> JSONResponse:
    """Provision (on first use) and log the current portal user into IRIS.

    Flow: resolve the Keycloak user from the session cookie; if no IRIS
    account is mapped yet, generate random IRIS credentials, persist the
    mapping, and create the IRIS account via the root user; finally log in
    with the mapped credentials and return the IRIS token.

    NOTE(review): root credentials for IRIS are hard-coded below — move them
    to configuration/secret storage.
    """
    header = {"Content-Type": "application/json"}
    token = request.cookies.get(COOKIE_NAME)

    if not token:
        msg = "TokenDoesNotExist"
        logger.info(msg)
        return JSONResponse(status_code=200, content={"result": 0, "errorMessage": msg})

    # The cookie stores a stringified dict; literal_eval restores it.
    token = literal_eval(token)
    userInfo = await keycloak.user_info(token=token["data"]["access_token"], realm=settings.KEYCLOAK_INFO.realm)
    if userInfo.get("status_code") != 200:
        return JSONResponse(
            status_code=400, content={"result": 0, "errorMessage": userInfo.get("data").get("error_description")}
        )

    user_id = userInfo.get("data").get("user_id")

    iris_table = IrisInfoTable()
    try:
        # Is this portal user already mapped to an IRIS account?
        iris_info = session.query(**iris_table.get_query_data(user_id)).first()

        # No mapping yet: create an IRIS account for this user.
        if iris_info is None:
            # Portal-side profile, used to fill the IRIS join form below.
            user_info = session.query(**LoginTable.get_query_data(user_id)).first()

            # Generate random IRIS credentials until an unused id is found.
            while True:
                pw = get_random_str(False)
                iris_pw = f"{pw[0].upper()}{pw[1:]}-{get_random_str(True)}"
                iris_id = f"katech{get_random_str(False)}{get_random_str(True)}"

                logger.info("=== CREATE USER ID,PW ===")
                logger.info(f"IRIS ID : {iris_id}")
                logger.info(f"IRIS PW : {iris_pw}")
                logger.info("==========================")

                # Check whether the candidate iris_id is already taken.
                iris_table.key_column = "iris_id"
                dup_check = session.query(**iris_table.get_query_data(iris_id)).first()
                logger.info(dup_check)

                if dup_check is None:
                    logger.info("break")
                    break
            # Persist the user_id -> IRIS credentials mapping.
            insert_query = {"user_id": user_id, "iris_id": iris_id, "iris_pw": iris_pw}
            logger.info(session.execute(**iris_table.upsert_query_data("INSERT", insert_query)))

            # IRIS account-creation payload.
            join_info = {
                "userId": iris_id,
                "userPass": iris_pw,
                "roleCode": "USER",
                "groupId": "62b3fa2f-f3f5-4f88-a6de-dfef48c5c37a",  # Default Group
                "name": user_info["user_nm"],
                "desc": "테스트용 아이디",
                "email": user_id,
                "phone": user_info["moblphon"],
            }
            logger.info(join_info)

            # Log in as root to create the account.
            # NOTE(review): hard-coded root secret — move to config.
            iris_root = [{"iris_id": "root", "iris_pw": "!dufmaQkdgkr202208"}]  # "Katech12#$"
            root_token = get_token(iris_root, header)["token"]
            header["x-access-token"] = root_token

            logger.info(root_token)
            logger.info(header)

            res = requests.post(f"{base_url}/meta/account", data=json.dumps(join_info), verify=False, headers=header)
            logger.info(res.text)

            # Restore the lookup key and drop the root token before the user login.
            iris_table.key_column = "user_id"
            del header["x-access-token"]

            # Re-read the freshly inserted mapping.
            iris_info = session.query(**iris_table.get_query_data(user_id)).first()
        # Log in with the mapped credentials; the IRIS token is the payload.
        user_token = get_token([iris_info], header)

        result = JSONResponse(status_code=200, content={"result": 1, "errorMessage": "", "data": user_token})
    except Exception:
        except_name = get_exception_info()
        result = JSONResponse(status_code=400, content={"result": 0, "errorMessage": except_name})

    return result
diff --git a/API_SERVICE/login_service/common/config.py b/API_SERVICE/login_service/common/config.py
deleted file mode 100644
index 6e26c70b..00000000
--- a/API_SERVICE/login_service/common/config.py
+++ /dev/null
@@ -1,104 +0,0 @@
-import json
-import logging.config
-import os
-from functools import lru_cache
-from typing import Union
-
-from pydantic import BaseSettings, PostgresDsn, validator, SecretStr
-
-base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
-print(f"project base_dir :: {base_dir}")
-
-
-class DBInfo(BaseSettings):
- HOST: str = ""
- PORT: str = ""
- USER: str = ""
- PASS: SecretStr = ""
- BASE: str = ""
-
- def get_dsn(self):
- return ""
-
-
-class PGInfo(DBInfo):
- type: str = "postgres"
- SCHEMA: str = ""
-
- def get_dsn(self):
- return str(
- PostgresDsn.build(
- scheme="postgresql",
- host=self.HOST,
- port=self.PORT,
- user=self.USER,
- password=self.PASS.get_secret_value(),
- path=f"/{self.BASE}",
- )
- )
-
-
-class TiberoInfo(DBInfo):
- type: str = "tibero"
-
- def get_dsn(self):
- return f"DSN={self.BASE};UID={self.USER};PWD={self.PASS.get_secret_value()}"
-
-
-class Settings(BaseSettings):
- BASE_DIR = base_dir
- DB_POOL_RECYCLE: int = 900
- DB_ECHO: bool = False
- RELOAD: bool = True
- TESTING: bool = True
-
- DB_INFO: DBInfo = DBInfo()
- DB_URL: Union[str, PostgresDsn] = None
-
- @validator("DB_URL", pre=True, always=True)
- def assemble_db_url(cls, v, values):
- if all(value is not None for value in values.values()):
- return values.get("DB_INFO").get_dsn()
- raise ValueError("Not all PostgreSQL database connection values were provided.")
-
-
-class ProdSettings(Settings):
- RELOAD = False
- TESTING = False
-
- class Config:
- env_file = f"{base_dir}/.env"
- env_file_encoding = "utf-8"
-
-
-class LocalSettings(Settings):
- TESTING: bool = False
- DB_POOL_RECYCLE: int = 900
- DB_ECHO: bool = True
- RELOAD: bool = False
-
- # DB_INFO = PGInfo(
- # HOST="192.168.100.126", PORT="25432", USER="dpsi", PASS="hello.sitemng12#$", BASE="ktportal", SCHEMA="sitemng"
- # )
-
- DB_INFO: TiberoInfo = TiberoInfo(HOST="192.168.101.164", PORT="8629", USER="dhub", PASS="dhub1234", BASE="tibero")
-
-
-class TestSettings(LocalSettings):
- ...
-
-
-@lru_cache
-def get_settings():
- env = os.getenv("APP_ENV", "prod")
- print(env)
- return {"local": LocalSettings(), "test": TestSettings(), "prod": ProdSettings()}[env]
-
-
-settings = get_settings()
-print(settings)
-
-with open(os.path.join(base_dir, "logging.json")) as f:
- log_config = json.load(f)
- logging.config.dictConfig(log_config)
-logger = logging.getLogger()
diff --git a/API_SERVICE/login_service/common/const.py b/API_SERVICE/login_service/common/const.py
deleted file mode 100644
index faaba160..00000000
--- a/API_SERVICE/login_service/common/const.py
+++ /dev/null
@@ -1,3 +0,0 @@
-SECRET_KEY = "09d25e094faa6ca2556c818166b7a9563b93f7099f6f0f4caa6cf63b88e8d3e7"
-ALGORITHM = "HS256"
-EXPIRE_DELTA = 1
diff --git a/API_SERVICE/login_service/database/conn.py b/API_SERVICE/login_service/database/conn.py
deleted file mode 100644
index 67e8ee34..00000000
--- a/API_SERVICE/login_service/database/conn.py
+++ /dev/null
@@ -1,8 +0,0 @@
-from sqlalchemy.orm import declarative_base
-
-from login_service.common.config import settings
-from libs.database.tibero import TiberoConnector
-from libs.database.orm import SQLAlchemyConnector
-
-Base = declarative_base()
-db = SQLAlchemyConnector(Base) if settings.DB_INFO.type != "tibero" else TiberoConnector()
diff --git a/API_SERVICE/login_service/gunicorn.conf.py b/API_SERVICE/login_service/gunicorn.conf.py
index 5404e638..2b80e338 100644
--- a/API_SERVICE/login_service/gunicorn.conf.py
+++ b/API_SERVICE/login_service/gunicorn.conf.py
@@ -16,8 +16,9 @@
# Must be a positive integer. Generally set in the 64-2048
# range.
#
+import os
-bind = "0.0.0.0:21000"
+bind = "0.0.0.0:8000"
backlog = 2048
#
@@ -144,10 +145,33 @@
#
# A string of "debug", "info", "warning", "error", "critical"
#
-logfile = "./log/login.log"
-errorlog = "./log/login-error.log"
-loglevel = "info"
-accesslog = "./log/login.log"
+
+
def get_log_path():
    """Return the sibling ``log`` directory of this config file, creating it when absent."""
    import os

    log_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "log")
    if not os.path.exists(log_dir):
        os.makedirs(log_dir)
        print(f"make dir {log_dir}")
    return log_dir
+
+
+app_env = os.getenv("APP_ENV", "prod")
+if app_env == "prod":
+ loglevel = "info"
+ log_name = "gunicorn-login"
+ log_dir_path = get_log_path()
+ logfile = os.path.join(log_dir_path, log_name + ".log")
+ errorlog = os.path.join(log_dir_path, log_name + "-error.log")
+ accesslog = logfile
+else:
+ loglevel = "debug"
+ logfile = "-"
+ errorlog = "-"
+ accesslog = "-"
+
access_log_format = '%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s"'
#
@@ -163,7 +187,7 @@
# A string or None to choose a default of something like 'gunicorn'.
#
-proc_name = "API-Service-login"
+proc_name = "API-Login-Service"
#
@@ -219,4 +243,4 @@ def worker_int(worker):
def worker_abort(worker):
- worker.log.info("worker received SIGABRT signal")
\ No newline at end of file
+ worker.log.info("worker received SIGABRT signal")
diff --git a/API_SERVICE/login_service/gunicorn.sh b/API_SERVICE/login_service/gunicorn.sh
index f1396c29..4c0844ee 100755
--- a/API_SERVICE/login_service/gunicorn.sh
+++ b/API_SERVICE/login_service/gunicorn.sh
@@ -7,7 +7,7 @@ echo $pid_path
# gunicorn 실행 명령어
start_gunicorn() {
- gunicorn main:app --bind 0.0.0.0:21000 -c gunicorn.conf.py -D --pid $pid_path
+ gunicorn app.main:app --bind 0.0.0.0:21000 -c gunicorn.conf.py -D --pid $pid_path
sleep 2
pid=$(cat $pid_path)
echo "Gunicorn started. PID: $pid"
diff --git a/API_SERVICE/login_service/logging.json b/API_SERVICE/login_service/logging.json
deleted file mode 100644
index a37f24df..00000000
--- a/API_SERVICE/login_service/logging.json
+++ /dev/null
@@ -1,42 +0,0 @@
-{
- "version": 1,
- "disable_existing_loggers": false,
- "formatters": {
- "default": {
- "format": "%(asctime)s %(levelname)s [%(filename)s:%(lineno)d] - %(message)s"
- }
- },
- "handlers": {
- "console": {
- "class": "logging.StreamHandler",
- "level": "DEBUG",
- "formatter": "default"
- },
- "file": {
- "class": "logging.handlers.RotatingFileHandler",
- "level": "DEBUG",
- "formatter": "default",
- "filename": "./log/login.log",
- "mode": "a",
- "maxBytes": 20000000,
- "backupCount": 10
- }
- },
- "loggers": {
- "root": {
- "level": "DEBUG",
- "handlers": ["console", "file"],
- "propagate": false
- },
- "uvicorn.access": {
- "level": "INFO",
- "handlers": ["console", "file"],
- "propagate": false
- },
- "sqlalchemy.engine": {
- "level": "INFO",
- "handlers": ["console", "file"],
- "propagate": false
- }
- }
-}
\ No newline at end of file
diff --git a/API_SERVICE/login_service/main.py b/API_SERVICE/login_service/main.py
deleted file mode 100644
index 4f8755f3..00000000
--- a/API_SERVICE/login_service/main.py
+++ /dev/null
@@ -1,22 +0,0 @@
-import uvicorn
-from fastapi import FastAPI
-from login_service.routes.v1 import auth
-from login_service.common.config import settings
-from login_service.database.conn import db
-
-
-def create_app():
- app_ = FastAPI()
- print(settings.dict())
- db.init_app(app_, **settings.dict())
-
- app_.include_router(auth.router, prefix="/portal/api/common")
-
- return app_
-
-
-app = create_app()
-
-
-if __name__ == "__main__":
- uvicorn.run("main:app", host="0.0.0.0", port=8090, reload=True)
diff --git a/API_SERVICE/login_service/requirements.txt b/API_SERVICE/login_service/requirements.txt
index 75074285..b76eb6f1 100644
--- a/API_SERVICE/login_service/requirements.txt
+++ b/API_SERVICE/login_service/requirements.txt
@@ -1,9 +1,12 @@
aiohttp==3.8.4
aiosignal==1.3.1
anyio==3.6.2
+APScheduler==3.10.1
async-timeout==4.0.2
attrs==23.1.0
bcrypt==4.0.1
+boto3==1.28.67
+botocore==1.31.67
certifi==2022.12.7
cffi==1.15.1
charset-normalizer==3.1.0
@@ -11,23 +14,43 @@ click==8.1.3
cryptography==40.0.2
elastic-transport==8.4.0
elasticsearch==8.7.0
+exceptiongroup==1.1.2
fastapi==0.95.1
frozenlist==1.3.3
+greenlet==2.0.2
gunicorn==20.1.0
h11==0.14.0
+httpcore==0.17.3
+httpx==0.24.1
idna==3.4
+iniconfig==2.0.0
+jmespath==1.0.1
multidict==6.0.4
-psycopg2==2.9.6
-psycopg2-binary==2.9.6
+numpy==1.26.0
+packaging==23.1
+pandas==2.1.0
+passlib==1.7.4
+Pillow==10.0.1
+pluggy==1.2.0
+psycopg2-binary==2.9.9
pycparser==2.21
+pycryptodome==3.18.0
pydantic==1.10.7
PyJWT==2.7.0
-pyodbc==4.0.39
+pytest==7.4.0
+python-dateutil==2.8.2
python-dotenv==1.0.0
+pytz==2023.3
+requests==2.31.0
+s3transfer==0.7.0
+six==1.16.0
sniffio==1.3.0
-SQLAlchemy==2.0.9
+SQLAlchemy==2.0.22
starlette==0.26.1
+tomli==2.0.1
typing_extensions==4.5.0
+tzdata==2023.3
+tzlocal==5.0.1
urllib3==1.26.15
uvicorn==0.21.1
yarl==1.8.2
diff --git a/API_SERVICE/login_service/routes/v1/auth.py b/API_SERVICE/login_service/routes/v1/auth.py
deleted file mode 100644
index d8617a2b..00000000
--- a/API_SERVICE/login_service/routes/v1/auth.py
+++ /dev/null
@@ -1,128 +0,0 @@
-import logging
-from datetime import datetime, timedelta
-from typing import Optional
-
-import bcrypt
-import jwt
-from fastapi import APIRouter, Depends, Request
-from pydantic import BaseModel
-from starlette.responses import JSONResponse
-
-from libs.database.connector import Executor
-from login_service.common.const import ALGORITHM, EXPIRE_DELTA, SECRET_KEY
-from login_service.database.conn import db
-
-
-logger = logging.getLogger()
-
-
-class LoginInfo(BaseModel):
- user_id: str
- password: str
-
-
-class RegisterInfo(BaseModel):
- usridx: str
- id: str
- pwd: str
- nm: Optional[str]
- mbphne: Optional[str]
- phne: Optional[str]
- email: Optional[str]
- dept: Optional[str]
- roleidx: Optional[str]
- aprvusr: Optional[str]
- aprvyn: Optional[str]
- useyn: Optional[str]
- rgstusridx: Optional[str]
- mdfcusridx: Optional[str]
- rgstdt: Optional[str]
- bdt: Optional[str]
- gn: Optional[str]
- usrtpidx: Optional[str]
- usrtp: Optional[str]
- usrclsp: Optional[str]
- work: Optional[str]
- instidx: Optional[str]
- inst: Optional[str]
- cmpno: Optional[str]
-
-
-router = APIRouter()
-
-
-@router.post("/user/register")
-async def register(params: RegisterInfo, session: Executor = Depends(db.get_db)):
- hash_pw = bcrypt.hashpw(params.pwd.encode("utf-8"), bcrypt.gensalt()).decode(encoding="utf-8")
- params.pwd = hash_pw
- try:
- logger.info(params)
- row = session.query(
- table_nm="usr_mgmt",
- where_info=[
- {"table_nm": "usr_mgmt", "key": "usridx", "value": params.usridx, "compare_op": "=", "op": ""},
- {"table_nm": "usr_mgmt", "key": "id", "value": params.id, "compare_op": "=", "op": "AND"},
- ],
- ).first()
- if row:
- return JSONResponse(status_code=200, content={"result": 1, "errorMessage": "Already registered"})
- session.execute(method="INSERT", table_nm="usr_mgmt", data=params.dict())
- return JSONResponse(status_code=200, content={"result": 1, "errorMessage": ""})
- except Exception as e:
- return JSONResponse(status_code=500, content={"result": 0, "errorMessage": str(e)})
-
-
-@router.post("/user/login")
-async def login(params: LoginInfo, session: Executor = Depends(db.get_db)) -> JSONResponse:
- """
- F01: id, pwd 불일치
- F02: 관리자 승인 필요
- F03: 삭제된 계정
- """
- try:
- row = session.query(
- table_nm="USR_MGMT",
- where_info=[{"table_nm": "USR_MGMT", "key": "id", "value": params.user_id, "compare_op": "=", "op": ""}],
- ).first()
-
- if not row:
- return JSONResponse(content={"result": 0, "errorMessage": "F01"})
- elif row["useyn"] != "Y":
- return JSONResponse(content={"result": 0, "errorMessage": "F03"})
-
- is_verified = bcrypt.checkpw(params.password.encode("utf-8"), row["pwd"].encode("utf-8"))
- if not is_verified:
- return JSONResponse(content={"result": 0, "errorMessage": "F01"})
-
- if row["aprvyn"] != "Y":
- return JSONResponse(content={"result": 0, "errorMessage": "F02"})
-
- access_token = create_access_token(data=row)
- return JSONResponse(
- status_code=200,
- content={"result": 1, "errorMessage": "", "data": {"body": [{"Authorization": f"{access_token}"}]}},
- )
- except Exception as e:
- logger.error(e, exc_info=True)
- logger.error(f"data :: {params}")
- return JSONResponse(status_code=500, content={"result": 0, "errorMessage": str(e)})
-
-
-@router.get("/user/info")
-async def info(request: Request):
- token = request.headers.get("Authorization")
- if token.startswith("bearer "):
- token = token[7:]
- try:
- return jwt.decode(token, SECRET_KEY, algorithms=ALGORITHM)
- except jwt.DecodeError as e:
- logger.error(f"{e}, token :: {token}", exc_info=True)
-
-
-def create_access_token(data: dict = None, expires_delta: int = EXPIRE_DELTA):
- to_encode = data.copy()
- to_encode.pop("pwd")
- if expires_delta:
- to_encode.update({"exp": datetime.utcnow() + timedelta(hours=expires_delta)})
- encoded_jwt = jwt.encode(to_encode, SECRET_KEY, algorithm=ALGORITHM)
- return encoded_jwt
diff --git a/API_SERVICE/meta_service/ELKSearch/config.py b/API_SERVICE/meta_service/ELKSearch/config.py
deleted file mode 100644
index 6d43f646..00000000
--- a/API_SERVICE/meta_service/ELKSearch/config.py
+++ /dev/null
@@ -1,21 +0,0 @@
-from meta_service.ELKSearch.model import ElkServerConfig
-"""
-검색에 사용할 설정을 정의
-local_els와 비슷한 양식으로 정의
-
-ex)
-변수명 = ElsServerConfig(
- "els 주소",
- "els 포트"
-)
-"""
-
-local_server = ElkServerConfig(
- host="0.0.0.0",
- port="9200"
-)
-
-dev_server = ElkServerConfig(
- host="192.168.101.164",
- port="9200"
-)
\ No newline at end of file
diff --git a/API_SERVICE/meta_service/ELKSearch/mapping/test.json b/API_SERVICE/meta_service/ELKSearch/mapping/test.json
deleted file mode 100644
index e972bf91..00000000
--- a/API_SERVICE/meta_service/ELKSearch/mapping/test.json
+++ /dev/null
@@ -1,9 +0,0 @@
-{
- "mappings": {
- "properties": {
- "test_id": {"type": "keyword"},
- "name": {"type": "text"},
- "num": {"type": "integer"}
- }
- }
-}
\ No newline at end of file
diff --git a/API_SERVICE/meta_service/ELKSearch/mapping/vw_co_if.json b/API_SERVICE/meta_service/ELKSearch/mapping/vw_co_if.json
deleted file mode 100644
index 44c07f9d..00000000
--- a/API_SERVICE/meta_service/ELKSearch/mapping/vw_co_if.json
+++ /dev/null
@@ -1,83 +0,0 @@
-{
- "settings": {
- "queries.cache.enabled": "true",
- "refresh_interval":"10s",
- "max_shingle_diff": 10
- },
- "mappings": {
- "properties": {
- "idx": {
- "type": "text"
- },
- "conm": {
- "type": "text"
- },
- "dfnmtraidx": {
- "type": "text"
- },
- "bzrgtcd": {
- "type": "text"
- },
- "prcctcd": {
- "type": "text"
- },
- "coctidx": {
- "type": "text"
- },
- "psnidx": {
- "type": "text"
- },
- "psnnm": {
- "type": "text"
- },
- "phonm": {
- "type": "text"
- },
- "phonn": {
- "type": "text"
- },
- "offrdt": {
- "type": "date",
- "format":"strict_date_optional_time_nanos||yyyy-MM-dd'T'HH:mm:ss.SSSSSS||yyyy-MM-dd||yyyy-MM-dd HH:mm:ss.S||yyyy-MM-dd HH:mm:ss||yyyy-MM-dd HH:mm:ss.SSS||date_hour_minute_second"
- },
- "enddtENDDT": {
- "type": "date",
- "format":"strict_date_optional_time_nanos||yyyy-MM-dd'T'HH:mm:ss.SSSSSS||yyyy-MM-dd||yyyy-MM-dd HH:mm:ss.S||yyyy-MM-dd HH:mm:ss||yyyy-MM-dd HH:mm:ss.SSS||date_hour_minute_second"
- },
- "mdfcdt": {
- "type": "date",
- "format":"strict_date_optional_time_nanos||yyyy-MM-dd'T'HH:mm:ss.SSSSSS||yyyy-MM-dd||yyyy-MM-dd HH:mm:ss.S||yyyy-MM-dd HH:mm:ss||yyyy-MM-dd HH:mm:ss.SSS||date_hour_minute_second"
- },
- "ifoffryn": {
- "type": "text"
- },
- "salprc": {
- "type": "integer"
- },
- "sysdvl": {
- "type": "integer"
- },
- "coretc": {
- "type": "integer"
- },
- "nrtoffr": {
- "type": "integer"
- },
- "coct": {
- "type": "text"
- },
- "dfnmtra": {
- "type": "text"
- },
- "showyn": {
- "type": "text"
- },
- "wpnsystctidx": {
- "type": "text"
- },
- "wpnsystct": {
- "type": "text"
- }
- }
- }
-}
\ No newline at end of file
diff --git a/API_SERVICE/meta_service/ELKSearch/mapping/vw_cr_tc_bsns.json b/API_SERVICE/meta_service/ELKSearch/mapping/vw_cr_tc_bsns.json
deleted file mode 100644
index ec688cd2..00000000
--- a/API_SERVICE/meta_service/ELKSearch/mapping/vw_cr_tc_bsns.json
+++ /dev/null
@@ -1,40 +0,0 @@
-{
- "settings": {
- "queries.cache.enabled": "true",
- "refresh_interval":"10s",
- "max_shingle_diff": 10
- },
- "mappings": {
- "properties": {
- "idx": {
- "type": "text"
- },
- "bsnsnm": {
- "type": "text"
- },
- "spvsntskno": {
- "type": "integer"
- },
- "rschorgannm": {
- "type": "text"
- },
- "ctrprc": {
- "type": "integer"
- },
- "ctrdt": {
- "type": "date",
- "format":"strict_date_optional_time_nanos||yyyy-MM-dd'T'HH:mm:ss.SSSSSS||yyyy-MM-dd||yyyy-MM-dd HH:mm:ss.S||yyyy-MM-dd HH:mm:ss||yyyy-MM-dd HH:mm:ss.SSS||date_hour_minute_second"
- },
- "enddt": {
- "type": "date",
- "format":"strict_date_optional_time_nanos||yyyy-MM-dd'T'HH:mm:ss.SSSSSS||yyyy-MM-dd||yyyy-MM-dd HH:mm:ss.S||yyyy-MM-dd HH:mm:ss||yyyy-MM-dd HH:mm:ss.SSS||date_hour_minute_second"
- },
- "crtcbsnsctidx": {
- "type": "text"
- },
- "crtcbsnsct": {
- "type": "text"
- }
- }
- }
-}
\ No newline at end of file
diff --git a/API_SERVICE/meta_service/ELKSearch/mapping/vw_dfn_ind_sp_bsns.json b/API_SERVICE/meta_service/ELKSearch/mapping/vw_dfn_ind_sp_bsns.json
deleted file mode 100644
index 8e93fdd6..00000000
--- a/API_SERVICE/meta_service/ELKSearch/mapping/vw_dfn_ind_sp_bsns.json
+++ /dev/null
@@ -1,40 +0,0 @@
-{
- "settings": {
- "queries.cache.enabled": "true",
- "refresh_interval":"10s",
- "max_shingle_diff": 10
- },
- "mappings": {
- "properties": {
- "idx": {
- "type": "text"
- },
- "bsnsnm": {
- "type": "text"
- },
- "spvsntskno": {
- "type": "integer"
- },
- "rschorgannm": {
- "type": "text"
- },
- "ctrprc": {
- "type": "integer"
- },
- "ctrdt": {
- "type": "date",
- "format":"strict_date_optional_time_nanos||yyyy-MM-dd'T'HH:mm:ss.SSSSSS||yyyy-MM-dd||yyyy-MM-dd HH:mm:ss.S||yyyy-MM-dd HH:mm:ss||yyyy-MM-dd HH:mm:ss.SSS||date_hour_minute_second"
- },
- "enddt": {
- "type": "date",
- "format":"strict_date_optional_time_nanos||yyyy-MM-dd'T'HH:mm:ss.SSSSSS||yyyy-MM-dd||yyyy-MM-dd HH:mm:ss.S||yyyy-MM-dd HH:mm:ss||yyyy-MM-dd HH:mm:ss.SSS||date_hour_minute_second"
- },
- "dfnindspbsnsctidx": {
- "type": "text"
- },
- "dfnindspbsnsct": {
- "type": "text"
- }
- }
- }
-}
\ No newline at end of file
diff --git a/API_SERVICE/meta_service/ELKSearch/mapping/vw_item_if.json b/API_SERVICE/meta_service/ELKSearch/mapping/vw_item_if.json
deleted file mode 100644
index d42e31fc..00000000
--- a/API_SERVICE/meta_service/ELKSearch/mapping/vw_item_if.json
+++ /dev/null
@@ -1,47 +0,0 @@
-{
- "settings": {
- "queries.cache.enabled": "true",
- "refresh_interval":"10s",
- "max_shingle_diff": 10
- },
- "mappings": {
- "properties": {
- "idx": {
- "type": "text"
- },
- "itemnm": {
- "type": "text"
- },
- "partmtnm": {
- "type": "text"
- },
- "idtn": {
- "type": "text"
- },
- "cbsn": {
- "type": "text"
- },
- "unt": {
- "type": "text"
- },
- "qty": {
- "type": "integer"
- },
- "cst": {
- "type": "integer"
- },
- "ctrprc": {
- "type": "integer"
- },
- "ctryr": {
- "type": "integer"
- },
- "wpnsystctidx": {
- "type": "text"
- },
- "wpnsystct": {
- "type": "text"
- }
- }
- }
-}
\ No newline at end of file
diff --git a/API_SERVICE/meta_service/ELKSearch/mapping/wpn_stm_bsns.json b/API_SERVICE/meta_service/ELKSearch/mapping/wpn_stm_bsns.json
deleted file mode 100644
index 133705f5..00000000
--- a/API_SERVICE/meta_service/ELKSearch/mapping/wpn_stm_bsns.json
+++ /dev/null
@@ -1,42 +0,0 @@
-{
- "settings": {
- "queries.cache.enabled": "true",
- "refresh_interval":"10s",
- "max_shingle_diff": 10
- },
- "mappings": {
- "properties": {
- "idx": {
- "type": "text"
- },
- "bsnsnm": {
- "type": "text"
- },
- "partmtnm": {
- "type": "text"
- },
- "stmdiv": {
- "type": "text"
- },
- "cbsn": {
- "type": "text"
- },
- "bsnsstp": {
- "type": "text"
- },
- "cst": {
- "type": "integer"
- },
- "ctrprc": {
- "type": "integer"
- },
- "ctrdt": {
- "type": "date",
- "format":"strict_date_optional_time_nanos||yyyy-MM-dd'T'HH:mm:ss.SSSSSS||yyyy-MM-dd||yyyy-MM-dd HH:mm:ss.S||yyyy-MM-dd HH:mm:ss||yyyy-MM-dd HH:mm:ss.SSS||date_hour_minute_second"
- },
- "wpnsystctidx": {
- "type": "text"
- }
- }
- }
-}
\ No newline at end of file
diff --git a/API_SERVICE/meta_service/ELKSearch/test/test.py b/API_SERVICE/meta_service/ELKSearch/test/test.py
deleted file mode 100644
index 493b0f12..00000000
--- a/API_SERVICE/meta_service/ELKSearch/test/test.py
+++ /dev/null
@@ -1,10 +0,0 @@
-import unittest
-
-
-class MyTestCase(unittest.TestCase):
- def test_something(self):
- self.assertEqual(True, False) # add assertion here
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/API_SERVICE/meta_service/database/models.py b/API_SERVICE/meta_service/app/__init__.py
similarity index 100%
rename from API_SERVICE/meta_service/database/models.py
rename to API_SERVICE/meta_service/app/__init__.py
diff --git a/API_SERVICE/meta_service/app/common/__init__.py b/API_SERVICE/meta_service/app/common/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/API_SERVICE/meta_service/app/common/config.py b/API_SERVICE/meta_service/app/common/config.py
new file mode 100644
index 00000000..3b512416
--- /dev/null
+++ b/API_SERVICE/meta_service/app/common/config.py
@@ -0,0 +1,121 @@
+import logging.config
+import os
+from functools import lru_cache
+
+from pydantic import BaseSettings, PostgresDsn, Field
+
+base_dir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+print(f"meta base_dir :: {base_dir}")
+
+
+class DBInfo(BaseSettings):
+ DB_POOL_RECYCLE: int = 900
+ DB_ECHO: bool = True
+ DB_URL: str
+
+
+class PGInfo(DBInfo):
+ SCHEMA: str
+
+ class Config:
+ env_file = f"{base_dir}/.env"
+ env_file_encoding = "utf-8"
+
+
+class ELSInfo(BaseSettings):
+ ELS_HOST: str = Field(..., alias="host")
+ ELS_PORT: int = Field(..., alias="port")
+
+ class Config:
+ env_file = f"{base_dir}/.env"
+ env_file_encoding = "utf-8"
+
+
+class Settings(BaseSettings):
+ BASE_DIR = base_dir
+ RELOAD: bool
+ TESTING: bool
+
+ DB_INFO: DBInfo
+ ELS_INFO: ELSInfo
+
+
+class ProdSettings(Settings):
+ RELOAD = False
+ TESTING = False
+
+ DB_INFO = PGInfo()
+ ELS_INFO = ELSInfo()
+
+ class Config:
+ env_file = f"{base_dir}/.env"
+ env_file_encoding = "utf-8"
+
+
+class LocalSettings(Settings):
+ TESTING: bool = False
+ RELOAD: bool = False
+
+ DB_INFO = PGInfo(
+ DB_POOL_RECYCLE=900,
+ DB_ECHO=False,
+ SCHEMA="sitemng,users,meta,iag,ckan,board,analysis",
+ DB_URL=str(
+ PostgresDsn.build(
+ scheme="postgresql",
+ host="192.168.100.126",
+ port="25432",
+ user="dpmanager",
+ password="hello.dp12#$",
+ path="/dataportal",
+ )
+ ),
+ )
+
+ # ELS_INFO = ELSInfo(host="192.168.101.44", port=39200)
+ ELS_INFO = ELSInfo(host="localhost", port=9200)
+
+
+class TestSettings(LocalSettings):
+ TESTING = True
+ RELOAD = True
+
+
+@lru_cache
+def get_settings() -> Settings:
+ env = os.getenv("APP_ENV", "prod")
+ print(env)
+ return {"local": LocalSettings(), "test": TestSettings(), "prod": ProdSettings()}[env]
+
+
+settings = get_settings()
+print(settings)
+
+log_config = {
+ "version": 1,
+ "disable_existing_loggers": False,
+ "formatters": {
+ "standard": {"format": "%(asctime)s %(levelname)s [%(filename)s:%(lineno)d] - %(message)s"},
+ },
+ "handlers": {
+ "console_handler": {
+ "class": "logging.StreamHandler",
+ "level": "DEBUG",
+ "formatter": "standard",
+ },
+ },
+ "root": {"level": "DEBUG", "handlers": ["console_handler"], "propagate": False},
+}
+
+if "prod" == os.getenv("APP_ENV", "prod"):
+ log_config["handlers"]["file_handler"] = {
+ "class": "logging.handlers.RotatingFileHandler",
+ "filename": os.path.join(base_dir, "log", "meta.log"),
+ "mode": "a",
+ "maxBytes": 20000000,
+ "backupCount": 10,
+ "level": "INFO",
+ "formatter": "standard",
+ }
+ log_config["root"]["handlers"].append("file_handler")
+logging.config.dictConfig(log_config)
diff --git a/API_SERVICE/meta_service/app/common/const.py b/API_SERVICE/meta_service/app/common/const.py
new file mode 100644
index 00000000..9c0ad037
--- /dev/null
+++ b/API_SERVICE/meta_service/app/common/const.py
@@ -0,0 +1,20 @@
+from pydantic import BaseModel
+
+from libs.database.dml_controller import Base
+
+
+class Prefix(BaseModel):
+ index: str
+ size: int
+ fields: list
+ query: str
+
+
+class MetaTempTable(Base):
+ table_nm = "meta_temp"
+ key_column = "gimi9_rid"
+
+
+class MetaHtmlTable(Base):
+ table_nm = "tb_meta_html"
+ key_column = "emid"
diff --git a/API_SERVICE/meta_service/app/common/search.py b/API_SERVICE/meta_service/app/common/search.py
new file mode 100644
index 00000000..d75f7e94
--- /dev/null
+++ b/API_SERVICE/meta_service/app/common/search.py
@@ -0,0 +1,131 @@
+from datetime import datetime
+
+from pydantic import BaseModel
+
+from libs.els.ELKSearch.Utils.base import set_els, make_format
+from libs.els.ELKSearch.document import DocumentManager
+from libs.els.ELKSearch.model import InputModel
+from meta_service.app.common.config import base_dir
+
+
+class Record(BaseModel):
+ index: str
+ key: str
+ ids: str
+
+
+class SearchModel(InputModel):
+ chk: bool = False
+
+
+def exception_col(table_nm: str, insert_body: dict) -> dict:
+ """
+ db데이터를 els에 넣기전 실행해야 하는 예외 처리
+ 입력하지 못하는 column들을 insert 구문에서 삭제 해주는 기능
+ """
+ pass
+
+
+def default_search_set(host, port, index, size=10, from_=0):
+ """
+ 검색에 필요한 default 세팅
+ 자동완성과 검색에 사용
+ """
+ es = set_els(host, port)
+ docmanger = DocumentManager(es, index)
+ docmanger.set_pagination(size, from_)
+ return docmanger
+
+
+def record_keyword(search_option):
+ word_path = f"{base_dir}/log/{datetime.today().strftime('%Y%m%d')}_search.log"
+ with open(word_path, "a") as fp:
+ for search_query in search_option:
+ fp.write(f"{str(search_query.keywords)}\n")
+
+
+def base_query(len_query: int, queryOption: list) -> list:
+ """
+ 검색에 사용되는 base_query, match must 방식으로 query를 만들어 준다
+ :param queryOption: ELKSearch model SearchOption or FilterOption
+ :return:
+ ["multi_match": {
+ "query": "data_1",
+ "fields": ["column_1"],
+ "type": "phrase_prefix"
+ }
+ ]
+ """
+ if len_query:
+ query_func = "multi_match"
+ query_type = "phrase_prefix"
+
+ return [
+ {
+ query_func: {
+ "query": str(query.keywords[0]),
+ "fields": query.field,
+ "type": query_type,
+ }
+ }
+ if len(query.keywords) == 1
+ else {
+ query_func: {
+ "query": " ".join(query.keywords),
+ "fields": query.field,
+ "operator": query.operator,
+ }
+ }
+ for query in queryOption
+ ]
+
+ else:
+ return queryOption
+
+
+def delete_srttn(item_list):
+ for i, item in enumerate(item_list):
+ if "fields" in item["multi_match"].keys() and item["multi_match"]["fields"][0] == "data_srttn":
+ del item_list[i]
+ break
+ return item_list
+
+
+def check_query(query_dict, item_list):
+ if "match_all" in query_dict["query"].keys():
+ return make_format("query", "bool", {"filter": item_list}), item_list
+ else:
+ return query_dict, delete_srttn(item_list)
+
+
+def search_count(es, item_list, query_dict):
+ # data_srttn 순서 고정
+ # totalCount에 해외데이터는 포함되지 않는다
+ data_srttn = {
+ # search_keyword: (result_key, result_data)
+ "전체": "totalCount",
+ "보유데이터": "hasCount",
+ "연동데이터": "innerCount",
+ "외부데이터": "externalCount",
+ "해외데이터": "overseaCount",
+ }
+ data_dict = dict()
+ index = "biz_meta,v_biz_meta_oversea_els"
+
+ # set query count dict
+ query_dict, item_list = check_query(query_dict, item_list)
+ srttn_index = len(item_list)
+ for ko_nm, eng_nm in data_srttn.items():
+ if ko_nm != "전체":
+ item_list = item_list[:srttn_index]
+ cnt_query = make_format(
+ "match", "data_srttn", {"operator": "OR", "query": ko_nm}
+ )
+ item_list.append(cnt_query)
+
+ query_dict["query"]["bool"]["filter"] = item_list
+ es.index = index
+ cnt = es.count(body=query_dict)
+ data_dict[eng_nm] = cnt
+
+ return data_dict
diff --git a/API_SERVICE/meta_service/app/common/utils.py b/API_SERVICE/meta_service/app/common/utils.py
new file mode 100644
index 00000000..264ee2ad
--- /dev/null
+++ b/API_SERVICE/meta_service/app/common/utils.py
@@ -0,0 +1,28 @@
+import re
+from copy import deepcopy
+
+
+def data_process(data):
+ pre_data = deepcopy(data)
+ for k, v in data.items():
+ if not v:
+ continue
+
+ if k in ["ctgry", "data_shap", "data_prv_desk"]:
+ key = f"re_{k}"
+ pre_data[key] = re.sub("[ ]", "", str(v))
+
+ if isinstance(v, str):
+ match = re.match(r"(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2})\.(\d+)", v)
+ if match:
+ date_time_field = match.group(1).replace(" ", "T")
+ micro_time_field = match.group(2)
+
+ if "+" in micro_time_field:
+ micro_time_field = micro_time_field.split("+")[0]
+ if len(micro_time_field) < 6:
+ micro_time_field = micro_time_field + "0"
+
+ pre_data[k] = f"{date_time_field}.{micro_time_field}"
+
+ return {"_id": pre_data["biz_dataset_id"], "_source": pre_data}
diff --git a/API_SERVICE/meta_service/app/database/__init__.py b/API_SERVICE/meta_service/app/database/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/API_SERVICE/meta_service/app/database/conn.py b/API_SERVICE/meta_service/app/database/conn.py
new file mode 100644
index 00000000..b39f27a1
--- /dev/null
+++ b/API_SERVICE/meta_service/app/database/conn.py
@@ -0,0 +1,6 @@
+from sqlalchemy.ext.automap import automap_base
+
+from libs.database.orm import SQLAlchemyConnector
+
+Base = automap_base()
+db = SQLAlchemyConnector(Base)
diff --git a/API_SERVICE/meta_service/app/database/models.py b/API_SERVICE/meta_service/app/database/models.py
new file mode 100644
index 00000000..e69de29b
diff --git a/API_SERVICE/meta_service/app/main.py b/API_SERVICE/meta_service/app/main.py
new file mode 100644
index 00000000..aaf47cfe
--- /dev/null
+++ b/API_SERVICE/meta_service/app/main.py
@@ -0,0 +1,27 @@
+import logging
+
+import uvicorn
+from fastapi import FastAPI
+
+from meta_service.app.common.config import settings
+from meta_service.app.database.conn import db
+from meta_service.app.routes.v1 import els, category, meta_insert
+logger = logging.getLogger()
+
+
+def create_app():
+ app_ = FastAPI()
+ logger.info(settings.dict())
+ db.init_app(app_, **settings.dict())
+
+ app_.include_router(category.router, prefix="/portal/api/meta")
+ app_.include_router(els.router, prefix="/portal/api/meta")
+ app_.include_router(meta_insert.router, prefix="/portal/api/meta")
+ return app_
+
+
+app = create_app()
+
+
+if __name__ == "__main__":
+ uvicorn.run("main:app", host="0.0.0.0", port=8090, reload=True)
diff --git a/API_SERVICE/meta_service/app/routes/__init__.py b/API_SERVICE/meta_service/app/routes/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/API_SERVICE/meta_service/app/routes/v1/__init__.py b/API_SERVICE/meta_service/app/routes/v1/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/API_SERVICE/meta_service/app/routes/v1/category.py b/API_SERVICE/meta_service/app/routes/v1/category.py
new file mode 100644
index 00000000..5e9c332d
--- /dev/null
+++ b/API_SERVICE/meta_service/app/routes/v1/category.py
@@ -0,0 +1,88 @@
+import logging
+
+from fastapi import Depends, APIRouter
+from pydantic import BaseModel
+
+from libs.database.connector import Executor
+from libs.els.ELKSearch.Utils.base import make_format
+from meta_service.app.common.config import settings
+from meta_service.app.common.search import default_search_set
+from meta_service.app.database.conn import db
+
+router = APIRouter()
+logger = logging.getLogger()
+
+
+class UpdateCategory(BaseModel):
+ node_id: str
+ node_nm: str
+
+
+@router.post("/updateCategory")
+def update_category(update: UpdateCategory, session: Executor = Depends(db.get_db)):
+ """
+ 현재 사용하지 않는 API
+ 구 버전과 동작은 동작은 같은 코드
+ prnts_id 값을 uuid값으로 부여한다.
+ """
+ # table_nm = "tb_category"
+ # data_query = {
+ # "table_nm": table_nm,
+ # "key": "*",
+ # "where_info": [
+ # {
+ # "table_nm": table_nm,
+ # "key": "node_id",
+ # "value": update.node_id,
+ # "op": "",
+ # "compare_op": "="
+ # }
+ # ]
+ # }
+ # try:
+ # # select
+ # row = session.query(**data_query).first()
+ # row["prnts_id"] = convert_data(uuid.uuid4())
+ # # update
+ # update_query = {
+ # "method": "UPDATE",
+ # "table_nm": table_nm,
+ # "data": row,
+ # "key": "node_id"
+ # }
+ # session.execute(**update_query)
+ try:
+ result = {"result": 1, "errorMessage": ""}
+ except Exception as e:
+ result = {"result": 0, "errorMessage": str(e)}
+ logger.error(e, exc_info=True)
+ return result
+
+
+class DeleteData(BaseModel):
+ biz_dataset_id: str
+
+
+@router.get("/getCategoryNmCount")
+def update_category(nms: str):
+ """
+ 메인 페이지에 사용
+ :param nms: 기타,미래차 산업, ....
+ :return:
+ """
+ data_dict = {}
+ index = "biz_meta,v_biz_meta_oversea_els"
+ key = "re_ctgry"
+ try:
+ docmanager = default_search_set(settings.ELS_INFO.ELS_HOST, settings.ELS_INFO.ELS_PORT, index)
+ ctgry_nm_list = nms.split(",")
+ for c_id in ctgry_nm_list:
+ c_v = c_id.replace(" ", "")
+ cnt_query = make_format("query", "match_phrase", {key: c_v})
+ cnt = docmanager.count(body=cnt_query)
+ data_dict[c_id.replace(" ", "_")] = cnt
+ result = {"result": 1, "errorMessage": "", "data": data_dict}
+ except Exception as e:
+ result = {"result": 0, "errorMessage": str(e)}
+ logger.error(e, exc_info=True)
+ return result
diff --git a/API_SERVICE/meta_service/app/routes/v1/els.py b/API_SERVICE/meta_service/app/routes/v1/els.py
new file mode 100644
index 00000000..55646104
--- /dev/null
+++ b/API_SERVICE/meta_service/app/routes/v1/els.py
@@ -0,0 +1,295 @@
+import decimal
+import logging
+import os.path
+
+from fastapi import Depends, APIRouter
+from pydantic import BaseModel
+
+from libs.database.connector import Executor
+from libs.els.ELKSearch.Utils.base import make_format
+from libs.els.ELKSearch.Utils.base import set_els
+from libs.els.ELKSearch.Utils.document_utils import search_filter
+from libs.els.ELKSearch.index import Index
+from meta_service.app.common.config import settings
+from meta_service.app.common.const import Prefix
+from meta_service.app.common.search import SearchModel
+from meta_service.app.common.search import default_search_set, base_query, record_keyword, search_count, Record
+from meta_service.app.common.utils import data_process
+from meta_service.app.database.conn import db
+
+router = APIRouter()
+
+logger = logging.getLogger()
+
+
+class DeleteData(BaseModel):
+ biz_dataset_id: str
+
+
+@router.post("/bulk_update", response_model=dict)
+def els_update(index: str, key: str = "biz_dataset_id", session: Executor = Depends(db.get_db)):
+ """
+ - bulk update에 사용되는 api X
+
+ :param index: els index 명
+ :param key: id 값
+ :param session: db session
+ :return:
+ """
+ # data_query = "SELECT {0} FROM {1};"
+ data_query = {"table_nm": index}
+ try:
+ column_dict = session.get_column_info(index, settings.DB_INFO.SCHEMA)
+ columns = [col["column_name"] for col in column_dict]
+ rows = session.query(**data_query).all()[0]
+
+ docmanager = default_search_set(settings.ELS_INFO.ELS_HOST, settings.ELS_INFO.ELS_PORT, index)
+
+ for row in rows:
+ insert_body = dict()
+ for i in range(0, len(columns)):
+ insert_body[columns[i]] = row[columns[i]]
+ insert_body = data_process(insert_body)
+ docmanager.set_body(insert_body["_source"])
+ logger.info(docmanager.insert(insert_body["_source"][key]))
+ result = {"result": 1, "data": "test"}
+
+ except Exception as e:
+ result = {"result": 0, "errorMessage": str(e)}
+ logger.error(e, exc_info=True)
+ return result
+
+
+@router.post("/getElsBizMetaList")
+@router.post("/getElsCkanList")
+def search(input: SearchModel):
+ """
+ :param input:
+ {
+ "chk": true,
+ "index": "index_name",
+ "from": 0, # page
+ "size": 10, # result size
+ "resultField": ["col1", "col2"],
+ "sortOption": [{"col": "desc"}],
+ "searchOption": [
+ {
+ "field": ["conm"],
+ "operator": "OR",
+ "keywords": ["기업명"]
+ }
+ ],
+ "filterOption": []
+ }
+ :return:
+ {
+ "result": 1,
+ "data": {
+ "header": {"column_name": "col1", "kor_column_name": "컬럼명1"},
+ "count": "10", # total count
+ "body": [{data set 1}, {data set 2} ... {data set 10}]
+ }
+ }
+ """
+ try:
+ len_search = len(input.searchOption)
+ len_filter = len(input.filterOption)
+ len_range = len(input.rangeOption)
+
+ if input.chk and len_search:
+ # 추천검색어를 위한 검색어 저장
+ record_keyword(input.searchOption)
+
+ # from_ 0 부터 시작해야함, web에서는 1부터 넘어오기 때문에 1을 빼준다
+ print(input.dict())
+ docmanager = default_search_set(
+ settings.ELS_INFO.ELS_HOST, settings.ELS_INFO.ELS_PORT, input.index, input.size, input.from_ - 1
+ )
+
+ # query에 조건이 없으면 match all 실행
+ if not any([len_filter, len_search, len_range]):
+ body = make_format("query", "match_all", dict())
+ filter_query = []
+ else:
+ search_format = "(*{0}*)"
+ search_query = []
+ for query in input.searchOption:
+ keywords = [search_format.format(keyword) for keyword in query.keywords]
+ if len(keywords) > 1:
+ keywords = f" {query.operator.upper()} ".join(keywords)
+ else:
+ keywords = keywords[0]
+ search_query.append(
+ {"query_string": {"query": keywords, "fields": query.field, "minimum_should_match": "100%"}}
+ )
+
+ filter_query = base_query(len_filter, input.filterOption)
+
+ # range option
+ for query in input.rangeOption:
+ filter_query.append(make_format("range", query.field, query.compare_dict))
+
+ body = make_format("query", "bool", {"must": search_query, "filter": filter_query})
+
+ docmanager.set_sort(input.sortOption)
+ docmanager.set_body(body)
+
+ data_dict = {"searchList": search_filter(docmanager.find(input.resultField))}
+ data_dict.update(search_count(docmanager, filter_query, body))
+
+ result = {"result": 1, "errorMessage": "", "data": data_dict}
+ except Exception as e:
+ result = {"result": 0, "errorMessage": str(e)}
+ logger.error(e, exc_info=True)
+ return result
+
+
+@router.post("/deleteElsBizMeta")
+def els_delete(input: DeleteData):
+ try:
+ index = "biz_dataset_id"
+ docmanager = default_search_set(settings.ELS_INFO.ELS_HOST, settings.ELS_INFO.ELS_PORT, index)
+ docmanager.delete(index, input.biz_dataset_id)
+
+ result = {"result": 1, "data": f"{input.biz_dataset_id} delete"}
+ except Exception as e:
+ result = {"result": 0, "errorMessage": str(e)}
+ logger.error(e, exc_info=True)
+ return result
+
+
+@router.post("/els-index-create", response_model=dict)
+def els_update(index: str):
+ try:
+ es = set_els(settings.ELS_INFO.ELS_HOST, settings.ELS_INFO.ELS_PORT)
+ ind_manager = Index(es)
+ indices = ind_manager.all_index().keys()
+ if index not in indices:
+ logger.info(
+ ind_manager.create(
+ index=index, path=os.path.join(settings.BASE_DIR, "resources", "mapping", f"{index}.json")
+ )
+ )
+ result = {"result": 1, "data": "success"}
+
+ except Exception as e:
+ result = {"result": 0, "errorMessage": str(e)}
+ logger.error(e, exc_info=True)
+ return result
+
+
+@router.get("/updateElsBizMetaBulk", response_model=dict)
+def els_update(session: Executor = Depends(db.get_db)):
+ key = "biz_dataset_id"
+ table_nm = "v_biz_meta_info"
+ index = "biz_meta"
+ data_query = {
+ "table_nm": table_nm,
+ "where_info": [{"table_nm": table_nm, "key": "status", "value": "D", "compare_op": "=", "op": ""}],
+ }
+
+ try:
+ rows, _ = session.query(**data_query).all()
+ print(rows[0])
+ columns = list(rows[0].keys())
+ print(columns)
+
+ docmanager = default_search_set(settings.ELS_INFO.ELS_HOST, settings.ELS_INFO.ELS_PORT, index)
+
+ for row in rows:
+ insert_body = data_process(row)
+ print(insert_body)
+ docmanager.set_body(insert_body["_source"])
+ res = docmanager.insert(insert_body["_id"])
+ logger.info(res)
+ result = {"result": 1, "data": "test"}
+
+ except Exception as e:
+ result = {"result": 0, "errorMessage": str(e)}
+ logger.error(e, exc_info=True)
+ return result
+
+
+@router.post("/insertElsBizMeta", response_model=dict)
+def els_update(input: Record, session: Executor = Depends(db.get_db)):
+
+ data_query = {
+ "table_nm": input.index,
+ "where_info": [{"table_nm": input.index, "key": input.key, "value": input.ids, "compare_op": "in", "op": ""}],
+ }
+ logger.info(data_query)
+ try:
+ rows, _ = session.query(**data_query).all()
+ print(rows[0])
+ columns = list(rows[0].keys())
+ print(columns)
+
+ docmanager = default_search_set(settings.ELS_INFO.ELS_HOST, settings.ELS_INFO.ELS_PORT, input.index)
+
+ for row in rows:
+ for k, v in row.items():
+ if isinstance(v, decimal.Decimal):
+ row[k] = int(v)
+
+ # insert_body = exception_col(input.index, insert_body)
+ docmanager.set_body(row)
+ doc_id = row[input.key]
+ logger.info(docmanager.update(doc_id))
+ result = {"result": 1, "data": "test"}
+
+ except Exception as e:
+ result = {"result": 0, "errorMessage": str(e)}
+ logger.error(e, exc_info=True)
+ return result
+
+
+@router.post("/getPrefixBizMeta", response_model=dict)
+def autocomplete(input: Prefix):
+ """
+ 자동완성 API
+ :param input:
+ {
+ "index": "index_name",
+ "size": 5,
+ "fields": [
+ "col1", "col2"
+ ],
+ "query": "search keyword"
+ }
+ :return:
+ {
+ "result": 1,
+ "data": ["data1","data2"..."data5"]
+ }
+ """
+ try:
+ keyword = input.query
+ docmanager = default_search_set(settings.ELS_INFO.ELS_HOST, settings.ELS_INFO.ELS_PORT, input.index, input.size)
+ input.query = f"(*{input.query}*)"
+ del input.index
+ del input.size
+
+ search_query = {"query_string": input.dict()}
+
+ body = {"query": {"bool": {"must": [search_query]}}}
+
+ logger.info(body)
+ docmanager.set_body(body)
+ prefix_dict = search_filter(docmanager.find(input.fields))
+ print(prefix_dict)
+
+ if not len(prefix_dict):
+ return {"result": 1, "data": []}
+
+ # lower() 대소문자 구별 없이 검색하기 위한 방법
+ prefix_data = [
+ word for data in prefix_dict for word in data.values() if word and keyword.lower() in word.lower()
+ ]
+
+ # 데이터셋에서 해당 되는 데이터가 여러개 있을 수 있어 prefix_data에 size를 줌
+ result = {"result": 1, "data": prefix_data[: docmanager.size]}
+ except Exception as e:
+ result = {"result": 0, "errorMessage": str(e)}
+ logger.error(e, exc_info=True)
+
+ return result
diff --git a/API_SERVICE/meta_service/app/routes/v1/meta_insert.py b/API_SERVICE/meta_service/app/routes/v1/meta_insert.py
new file mode 100644
index 00000000..ab06dac5
--- /dev/null
+++ b/API_SERVICE/meta_service/app/routes/v1/meta_insert.py
@@ -0,0 +1,51 @@
+import base64
+import logging
+import os
+
+from fastapi import APIRouter, Depends
+
+from libs.database.connector import Executor
+from meta_service.app.common.const import MetaTempTable, MetaHtmlTable
+from meta_service.app.database.conn import db
+
+router = APIRouter()
+logger = logging.getLogger()
+
+
+def print_files_in_dir(root_dir, file_name):
+ files = os.listdir(root_dir)
+ print(len(files))
+ for file in files:
+ path = os.path.join(root_dir, file, file_name)
+ print(path)
+
+
+@router.get("/metaInsert")
+def update_category(session: Executor = Depends(db.get_db)):
+ eda_path = "/Users/cbc/Downloads/EDA_FILE"
+ try:
+ files = os.listdir(eda_path)
+ id_cnt = 0
+ for index, rid in enumerate(files):
+ print(index)
+ path = os.path.join(eda_path, rid, "profile_report_merged.html")
+ with open(path, "rb") as fd:
+ data = fd.read()
+ data_base64 = base64.b64encode(data).decode("ascii")
+ insert_data = f"data:text/html;base64,{data_base64}"
+ print(f"LEN : {len(insert_data)}")
+
+ select_res = session.query(**MetaTempTable.get_select_query(rid)).first()
+ if select_res:
+ biz_dataset_id = select_res["biz_dataset_id"]
+ data = {"biz_dataset_id": biz_dataset_id, "file_data": insert_data}
+ session.execute(**MetaHtmlTable.get_execute_query("INSERT", data))
+ else:
+ id_cnt += 1
+ print(f"id_cnt : {id_cnt}")
+
+ result = {"result": 1, "errorMessage": ""}
+ except Exception as e:
+ result = {"result": 0, "errorMessage": str(e)}
+ logger.error(e, exc_info=True)
+ return result
diff --git a/API_SERVICE/meta_service/common/config.py b/API_SERVICE/meta_service/common/config.py
deleted file mode 100644
index 2e373a24..00000000
--- a/API_SERVICE/meta_service/common/config.py
+++ /dev/null
@@ -1,107 +0,0 @@
-import json
-import logging.config
-import os
-from functools import lru_cache
-from typing import Union
-
-from pydantic import BaseSettings, PostgresDsn, validator, SecretStr
-
-base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
-print(f"project base_dir :: {base_dir}")
-
-
-class DBInfo(BaseSettings):
- HOST: str = ""
- PORT: str = ""
- USER: str = ""
- PASS: SecretStr = ""
- BASE: str = ""
- SCHEMA: str = ""
-
- def get_dsn(self):
- return ""
-
-
-class PGInfo(DBInfo):
- type: str = "postgres"
- SCHEMA: str = ""
-
- def get_dsn(self):
- return str(
- PostgresDsn.build(
- scheme="postgresql",
- host=self.HOST,
- port=self.PORT,
- user=self.USER,
- password=self.PASS.get_secret_value(),
- path=f"/{self.BASE}",
- )
- )
-
-
-class TiberoInfo(DBInfo):
- type: str = "tibero"
-
- def get_dsn(self):
- return f"DSN={self.BASE};UID={self.USER};PWD={self.PASS.get_secret_value()}"
-
-
-class Settings(BaseSettings):
- BASE_DIR = base_dir
- DB_POOL_RECYCLE: int = 900
- DB_ECHO: bool = False
- RELOAD: bool = True
- TESTING: bool = True
-
- DB_INFO: DBInfo = DBInfo()
- DB_URL: Union[str, PostgresDsn] = None
-
- @validator("DB_URL", pre=True, always=True)
- def assemble_db_url(cls, v, values):
- if all(value is not None for value in values.values()):
- return values.get("DB_INFO").get_dsn()
- raise ValueError("Not all PostgreSQL database connection values were provided.")
-
-
-class ProdSettings(Settings):
- RELOAD = False
- TESTING = False
-
- class Config:
- env_file = f"{base_dir}/.env"
- env_file_encoding = "utf-8"
-
-
-class LocalSettings(Settings):
- TESTING: bool = False
- DB_POOL_RECYCLE: int = 900
- DB_ECHO: bool = True
- RELOAD: bool = False
-
- # DB_INFO = PGInfo(
- # HOST="192.168.100.126", PORT="25432", USER="dpsi", PASS="hello.sitemng12#$", BASE="ktportal", SCHEMA="sitemng"
- # )
-
- DB_INFO: TiberoInfo = TiberoInfo(
- HOST="192.168.101.164", PORT="8629", USER="dhub", PASS="dhub1234", BASE="tibero", SCHEMA="DHUB"
- )
-
-
-class TestSettings(LocalSettings):
- ...
-
-
-@lru_cache
-def get_settings() -> Settings:
- env = os.getenv("APP_ENV", "prod")
- print(env)
- return {"local": LocalSettings(), "test": TestSettings(), "prod": ProdSettings()}[env]
-
-
-settings = get_settings()
-print(settings)
-
-with open(os.path.join(base_dir, "logging.json")) as f:
- log_config = json.load(f)
- logging.config.dictConfig(log_config)
-logger = logging.getLogger()
\ No newline at end of file
diff --git a/API_SERVICE/meta_service/common/search.py b/API_SERVICE/meta_service/common/search.py
deleted file mode 100644
index c521ef58..00000000
--- a/API_SERVICE/meta_service/common/search.py
+++ /dev/null
@@ -1,67 +0,0 @@
-from pydantic import BaseModel
-from meta_service.ELKSearch.Utils.base import set_els, make_format
-from meta_service.ELKSearch.document import DocumentManager
-from fastapi.logger import logger
-
-
-class Upsert(BaseModel):
- index: str
- key: str
- ids: str
-
-
-def exception_col(table_nm: str, insert_body: dict) -> dict:
- """
- db데이터를 els에 넣기전 실행해야 하는 예외 처리
- 입력하지 못하는 column들을 insert 구문에서 삭제 해주는 기능
- """
-
- if table_nm == "vw_co_if":
- insert_body.pop("mjrdfnprdc", None)
- insert_body.pop("mjrcvlprdc", None)
- insert_body.pop("skl", None)
- if table_nm == "vw_expr_item_db":
- logger.info(insert_body.keys())
- for key in list(insert_body.keys()):
- if not key in ["idx", "korconm"]:
- insert_body.pop(key, None)
- return insert_body
-
-
-def default_search_set(server_config, index, size=10, from_=0):
- """
- 검색에 필요한 default 세팅
- 자동완성과 검색에 사용
- """
- es = set_els(server_config)
- docmanger = DocumentManager(es, index)
- docmanger.set_pagination(size, from_)
- return docmanger
-
-
-def base_query(len_query:int, queryOption: list) -> list:
- """
- 검색에 사용되는 base_query, match must 방식으로 query를 만들어 준다
- :param queryOption: ELKSearch model SearchOption or FilterOption
- :return:
- ["multi_match": {
- "query": "data_1",
- "fields": ["column_1"],
- "type": "phrase_prefix"
- }
- ]
- """
- if len_query:
- query_func = "multi_match"
- query_type = "phrase_prefix"
-
- return [
- {query_func: {"query": str(query.keywords[0]), "fields": query.field, "type": query_type}}
- if len(query.keywords) == 1 else
- {query_func: {"query": " ".join(query.keywords), "fields": query.field, "operator": query.operator}}
- for query in queryOption
- ]
-
- else:
- return queryOption
-
\ No newline at end of file
diff --git a/API_SERVICE/meta_service/database/conn.py b/API_SERVICE/meta_service/database/conn.py
deleted file mode 100644
index b21b873f..00000000
--- a/API_SERVICE/meta_service/database/conn.py
+++ /dev/null
@@ -1,29 +0,0 @@
-from fastapi import FastAPI
-from sqlalchemy import MetaData
-from sqlalchemy.ext.automap import automap_base
-from sqlalchemy.orm import declarative_base
-
-from meta_service.common.config import settings
-from libs.database.tibero import TiberoConnector
-from libs.database.orm import SQLAlchemyConnector
-
-
-# TODO: SQLAlchemy version 수정필요
-class SQLAlchemyForCommon(SQLAlchemyConnector):
- def __init__(self, app: FastAPI = None, **kwargs):
- self._table_dict = None
- if app is not None:
- self.init_app(app=app, **kwargs)
-
- metadata = MetaData()
- for schema in kwargs.get("PG_SCHEMA").split(","):
- metadata.reflect(bind=self.engine, views=True, schema=schema)
-
- self._Base = automap_base(metadata=metadata)
- self._Base.prepare()
-
- # self._table_dict = dict(metadata.tables)
-
-
-Base = declarative_base()
-db = SQLAlchemyForCommon(Base) if settings.DB_INFO.type != "tibero" else TiberoConnector()
diff --git a/API_SERVICE/meta_service/gunicorn.conf.py b/API_SERVICE/meta_service/gunicorn.conf.py
index a06cc858..f20dc6b2 100644
--- a/API_SERVICE/meta_service/gunicorn.conf.py
+++ b/API_SERVICE/meta_service/gunicorn.conf.py
@@ -16,6 +16,7 @@
# Must be a positive integer. Generally set in the 64-2048
# range.
#
+import os
bind = "0.0.0.0:8000"
backlog = 2048
@@ -73,7 +74,7 @@
worker_connections = 1000
timeout = 60
keepalive = 2
-reload = True
+reload = False
#
# spew - Install a trace function that spews every line of Python
@@ -127,6 +128,7 @@
#
daemon = False
+pidfile = "./gunicorn-meta.pid"
umask = 0
user = None
group = None
@@ -143,10 +145,33 @@
#
# A string of "debug", "info", "warning", "error", "critical"
#
-logfile = "./log/common.log"
-errorlog = "./log/common-error.log"
-loglevel = "info"
-accesslog = "./log/common.log"
+
+
+def get_log_path():
+ import os
+
+ path_ = os.path.join(os.path.dirname(os.path.abspath(__file__)), "log")
+ if not os.path.exists(path_):
+ os.makedirs(path_)
+ print(f"make dir {path_}")
+
+ return path_
+
+
+app_env = os.getenv("APP_ENV", "prod")
+if app_env == "prod":
+ loglevel = "info"
+ log_name = "gunicorn-meta"
+ log_dir_path = get_log_path()
+ logfile = os.path.join(log_dir_path, log_name + ".log")
+ errorlog = os.path.join(log_dir_path, log_name + "-error.log")
+ accesslog = logfile
+else:
+ loglevel = "debug"
+ logfile = "-"
+ errorlog = "-"
+ accesslog = "-"
+
access_log_format = '%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s"'
#
@@ -162,7 +187,7 @@
# A string or None to choose a default of something like 'gunicorn'.
#
-proc_name = "API-Service-common"
+proc_name = "API-Meta-Service"
#
diff --git a/API_SERVICE/meta_service/gunicorn.sh b/API_SERVICE/meta_service/gunicorn.sh
index f81bcb9c..c69e075d 100755
--- a/API_SERVICE/meta_service/gunicorn.sh
+++ b/API_SERVICE/meta_service/gunicorn.sh
@@ -7,7 +7,7 @@ echo $pid_path
# gunicorn 실행 명령어
start_gunicorn() {
- gunicorn main:app --bind 0.0.0.0:22000 -c gunicorn.conf.py -D --pid $pid_path
+ gunicorn app.main:app --bind 0.0.0.0:22000 -c gunicorn.conf.py -D --pid $pid_path
sleep 2
pid=$(cat $pid_path)
echo "Gunicorn started. PID: $pid"
diff --git a/API_SERVICE/meta_service/logging.json b/API_SERVICE/meta_service/logging.json
deleted file mode 100644
index 7c5ff75e..00000000
--- a/API_SERVICE/meta_service/logging.json
+++ /dev/null
@@ -1,42 +0,0 @@
-{
- "version": 1,
- "disable_existing_loggers": false,
- "formatters": {
- "default": {
- "format": "%(asctime)s %(levelname)s [%(filename)s:%(lineno)d] - %(message)s"
- }
- },
- "handlers": {
- "console": {
- "class": "logging.StreamHandler",
- "level": "DEBUG",
- "formatter": "default"
- },
- "file": {
- "class": "logging.handlers.RotatingFileHandler",
- "level": "DEBUG",
- "formatter": "default",
- "filename": "./log/common.log",
- "mode": "a",
- "maxBytes": 20000000,
- "backupCount": 10
- }
- },
- "loggers": {
- "root": {
- "level": "DEBUG",
- "handlers": ["console", "file"],
- "propagate": false
- },
- "uvicorn.access": {
- "level": "INFO",
- "handlers": ["console", "file"],
- "propagate": false
- },
- "sqlalchemy.engine": {
- "level": "INFO",
- "handlers": ["console", "file"],
- "propagate": false
- }
- }
-}
\ No newline at end of file
diff --git a/API_SERVICE/meta_service/main.py b/API_SERVICE/meta_service/main.py
deleted file mode 100644
index 386b08eb..00000000
--- a/API_SERVICE/meta_service/main.py
+++ /dev/null
@@ -1,28 +0,0 @@
-import uvicorn
-from fastapi import FastAPI
-
-from meta_service.common.config import logger
-from meta_service.common.config import settings
-from meta_service.database.conn import db
-from meta_service.routes.v2 import autocomplete, els_data_search, els_bulk_update, els_upsert, els_index_create
-
-
-def create_app():
- app_ = FastAPI()
- logger.info(settings.dict())
- db.init_app(app_, **settings.dict())
-
- app_.include_router(autocomplete.router, prefix="/portal/api/meta")
- app_.include_router(els_data_search.router, prefix="/portal/api/meta")
- app_.include_router(els_bulk_update.router, prefix="/portal/api/meta")
- app_.include_router(els_upsert.router, prefix="/portal/api/meta")
- app_.include_router(els_index_create.router, prefix="/portal/api/meta")
-
- return app_
-
-
-app = create_app()
-
-
-if __name__ == "__main__":
- uvicorn.run("main:app", host="0.0.0.0", port=8090, reload=True)
diff --git a/API_SERVICE/meta_service/requirements.txt b/API_SERVICE/meta_service/requirements.txt
index e69de29b..b76eb6f1 100644
--- a/API_SERVICE/meta_service/requirements.txt
+++ b/API_SERVICE/meta_service/requirements.txt
@@ -0,0 +1,56 @@
+aiohttp==3.8.4
+aiosignal==1.3.1
+anyio==3.6.2
+APScheduler==3.10.1
+async-timeout==4.0.2
+attrs==23.1.0
+bcrypt==4.0.1
+boto3==1.28.67
+botocore==1.31.67
+certifi==2022.12.7
+cffi==1.15.1
+charset-normalizer==3.1.0
+click==8.1.3
+cryptography==40.0.2
+elastic-transport==8.4.0
+elasticsearch==8.7.0
+exceptiongroup==1.1.2
+fastapi==0.95.1
+frozenlist==1.3.3
+greenlet==2.0.2
+gunicorn==20.1.0
+h11==0.14.0
+httpcore==0.17.3
+httpx==0.24.1
+idna==3.4
+iniconfig==2.0.0
+jmespath==1.0.1
+multidict==6.0.4
+numpy==1.26.0
+packaging==23.1
+pandas==2.1.0
+passlib==1.7.4
+Pillow==10.0.1
+pluggy==1.2.0
+psycopg2-binary==2.9.9
+pycparser==2.21
+pycryptodome==3.18.0
+pydantic==1.10.7
+PyJWT==2.7.0
+pytest==7.4.0
+python-dateutil==2.8.2
+python-dotenv==1.0.0
+pytz==2023.3
+requests==2.31.0
+s3transfer==0.7.0
+six==1.16.0
+sniffio==1.3.0
+SQLAlchemy==2.0.22
+starlette==0.26.1
+tomli==2.0.1
+typing_extensions==4.5.0
+tzdata==2023.3
+tzlocal==5.0.1
+urllib3==1.26.15
+uvicorn==0.21.1
+yarl==1.8.2
diff --git a/API_SERVICE/meta_service/resources/mapping/els/biz_meta.json b/API_SERVICE/meta_service/resources/mapping/els/biz_meta.json
new file mode 100644
index 00000000..78d4edae
--- /dev/null
+++ b/API_SERVICE/meta_service/resources/mapping/els/biz_meta.json
@@ -0,0 +1,326 @@
+{
+ "settings":{
+ "queries.cache.enabled":"true",
+ "refresh_interval":"10s",
+ "max_shingle_diff":10,
+ "analysis":{
+ "tokenizer":{
+ "nori_user_dic":{
+ "type":"nori_tokenizer",
+ "decompound_mode":"discard",
+ "user_dictionary":"user_dic.txt"
+ }
+ },
+ "filter":{
+ "nori_pos":{
+ "type":"nori_part_of_speech",
+ "stoptags":[
+ "E",
+ "J",
+ "SC",
+ "SE",
+ "SF",
+ "SP",
+ "SSC",
+ "SSO",
+ "SY",
+ "VCN",
+ "VCP",
+ "VSV",
+ "VX",
+ "XPN",
+ "XSA",
+ "XSN",
+ "XSV"
+ ]
+ },
+ "synonym":{
+ "type":"synonym_graph",
+ "synonyms_path":"synonyms.txt"
+ },
+ "stopwords":{
+ "type":"stop",
+ "stopwords_path":"stopwords.txt"
+ },
+ "shingle_ten":{
+ "type":"shingle",
+ "token_separator":"",
+ "max_shingle_size":10
+ }
+ },
+ "analyzer":{
+ "korean_analyzer":{
+ "tokenizer":"nori_user_dic",
+ "filter":[
+ "nori_pos",
+ "nori_readingform",
+ "lowercase",
+ "synonym",
+ "stopwords",
+ "remove_duplicates",
+ "shingle_ten"
+ ]
+ }
+ }
+ }
+ },
+ "mappings":{
+ "properties":{
+ "biz_dataset_id":{
+ "type":"keyword"
+ },
+ "data_nm":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ },
+ "fielddata":true
+ },
+ "data_desc":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ }
+ },
+ "ctgry_id":{
+ "type":"keyword"
+ },
+ "ctgry":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ }
+ },
+ "data_prv_desk":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ }
+ },
+ "license":{
+ "type":"text"
+ },
+ "data_shap":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ }
+ },
+ "data_srttn":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ }
+ },
+ "src_url":{
+ "type":"text"
+ },
+ "kywrd":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ }
+ },
+ "data_updt_cyc":{
+ "type":"keyword"
+ },
+ "adm_dep":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ }
+ },
+ "admr_nm":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ }
+ },
+ "file_info":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ }
+ },
+ "file_read_authority":{
+ "type":"keyword"
+ },
+ "status":{
+ "type":"keyword"
+ },
+ "reg_type":{
+ "type":"keyword"
+ },
+ "retv_num":{
+ "type":"long"
+ },
+ "lang":{
+ "type":"text"
+ },
+ "adm_dep_hp":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ }
+ },
+ "updt_nxt_dt":{
+ "type":"date",
+ "format":"strict_date_optional_time_nanos||yyyy-MM-dd||yyyy-MM-dd HH:mm:ss.S||yyyy-MM-dd HH:mm:ss||yyyy-MM-dd HH:mm:ss.SSS||date_hour_minute_second"
+ },
+ "updt_dt":{
+ "type":"date",
+ "format":"strict_date_optional_time_nanos||yyyy-MM-dd||yyyy-MM-dd HH:mm:ss.S||yyyy-MM-dd HH:mm:ss||yyyy-MM-dd HH:mm:ss.SSS||date_hour_minute_second"
+ },
+ "reg_dt":{
+ "type":"date",
+ "format":"strict_date_optional_time_nanos||yyyy-MM-dd||yyyy-MM-dd HH:mm:ss.S||yyyy-MM-dd HH:mm:ss||yyyy-MM-dd HH:mm:ss.SSS||date_hour_minute_second"
+ },
+ "process_dt":{
+ "type":"date",
+ "format":"strict_date_optional_time_nanos||yyyy-MM-dd||yyyy-MM-dd HH:mm:ss.S||yyyy-MM-dd HH:mm:ss||yyyy-MM-dd HH:mm:ss.SSS||date_hour_minute_second"
+ },
+ "reg_user":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ }
+ },
+ "amd_user":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ }
+ },
+ "reg_date":{
+ "type":"date",
+ "format":"strict_date_optional_time_nanos||yyyy-MM-dd||yyyy-MM-dd HH:mm:ss.S||yyyy-MM-dd HH:mm:ss||yyyy-MM-dd HH:mm:ss.SSS||date_hour_minute_second"
+ },
+ "amd_date":{
+ "type":"date",
+ "format":"strict_date_optional_time_nanos||yyyy-MM-dd||yyyy-MM-dd HH:mm:ss.S||yyyy-MM-dd HH:mm:ss||yyyy-MM-dd HH:mm:ss.SSS||date_hour_minute_second"
+
+ },
+ "data_limit":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ }
+ },
+ "othr_use_notes":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ }
+ },
+ "data_global_nm":{
+ "type":"text"
+ },
+ "downl_num":{
+ "type":"long"
+ },
+ "attnt_data_num":{
+ "type":"long"
+ },
+ "share_num":{
+ "type":"long"
+ },
+ "contents":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ }
+ },
+ "has_sample":{
+ "type":"text"
+ },
+ "has_html":{
+ "type":"text"
+ },
+ "analysis_cnt":{
+ "type":"integer"
+ },
+ "data_type":{
+ "type":"text"
+ },
+ "swagger_url":{
+ "type":"text"
+ },
+ "api_url":{
+ "type":"text"
+ },
+ "api_type":{
+ "type":"text"
+ },
+ "data_format":{
+ "type":"text"
+ },
+ "traffic_opt":{
+ "type":"text"
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/API_SERVICE/meta_service/resources/mapping/els/v_biz_meta_oversea_els.json b/API_SERVICE/meta_service/resources/mapping/els/v_biz_meta_oversea_els.json
new file mode 100644
index 00000000..1a72a019
--- /dev/null
+++ b/API_SERVICE/meta_service/resources/mapping/els/v_biz_meta_oversea_els.json
@@ -0,0 +1,162 @@
+{
+ "settings":{
+ "queries.cache.enabled":"true",
+ "refresh_interval":"10s",
+ "max_shingle_diff":10,
+ "analysis":{
+ "tokenizer":{
+ "nori_user_dic":{
+ "type":"nori_tokenizer",
+ "decompound_mode":"discard",
+ "user_dictionary":"user_dic.txt"
+ }
+ },
+ "filter":{
+ "nori_pos":{
+ "type":"nori_part_of_speech",
+ "stoptags":[
+ "E",
+ "J",
+ "SC",
+ "SE",
+ "SF",
+ "SP",
+ "SSC",
+ "SSO",
+ "SY",
+ "VCN",
+ "VCP",
+ "VSV",
+ "VX",
+ "XPN",
+ "XSA",
+ "XSN",
+ "XSV"
+ ]
+ },
+ "synonym":{
+ "type":"synonym_graph",
+ "synonyms_path":"synonyms.txt"
+ },
+ "stopwords":{
+ "type":"stop",
+ "stopwords_path":"stopwords.txt"
+ },
+ "shingle_ten":{
+ "type":"shingle",
+ "token_separator":"",
+ "max_shingle_size":10
+ }
+ },
+ "analyzer":{
+ "korean_analyzer":{
+ "tokenizer":"nori_user_dic",
+ "filter":[
+ "nori_pos",
+ "nori_readingform",
+ "lowercase",
+ "synonym",
+ "stopwords",
+ "remove_duplicates",
+ "shingle_ten"
+ ]
+ }
+ }
+ }
+ },
+ "mappings":{
+ "properties":{
+ "biz_dataset_id":{
+ "type":"keyword"
+ },
+ "data_nm":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ },
+ "fielddata":true
+ },
+ "data_desc":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ }
+ },
+ "ctgry":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ }
+ },
+ "re_ctgry":{
+ "type":"text"
+ },
+ "re_data_prv_desk":{
+ "type":"text"
+ },
+ "data_prv_desk":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ }
+ },
+ "data_srttn":{
+ "type":"text",
+ "fields":{
+ "korean_analyzer":{
+ "type":"text",
+ "analyzer":"korean_analyzer",
+ "search_analyzer":"standard"
+ }
+ }
+ },
+ "updt_dt":{
+ "type":"date",
+ "format":"strict_date_optional_time_nanos||yyyy-MM-dd||yyyy-MM-dd HH:mm:ss.S||yyyy-MM-dd HH:mm:ss||yyyy-MM-dd HH:mm:ss.SSS||date_hour_minute_second"
+ },
+ "reg_dt":{
+ "type":"date",
+ "format":"strict_date_optional_time_nanos||yyyy-MM-dd||yyyy-MM-dd HH:mm:ss.S||yyyy-MM-dd HH:mm:ss||yyyy-MM-dd HH:mm:ss.SSS||date_hour_minute_second"
+ },
+ "update_time":{
+ "type":"date",
+ "format":"strict_date_optional_time_nanos||yyyy-MM-dd||yyyy-MM-dd HH:mm:ss.S||yyyy-MM-dd HH:mm:ss||yyyy-MM-dd HH:mm:ss.SSS||date_hour_minute_second"
+ },
+ "data_global_nm":{
+ "type":"text"
+ },
+ "nation":{
+ "type":"text"
+ },
+ "data_type":{
+ "type":"text"
+ },
+ "has_json":{
+ "type":"text"
+ },
+ "retv_num":{
+ "type":"integer"
+ },
+ "share_num":{
+ "type":"integer"
+ }
+
+ }
+ }
+}
\ No newline at end of file
diff --git a/API_SERVICE/meta_service/resources/mapping/logstash/biz_meta.conf b/API_SERVICE/meta_service/resources/mapping/logstash/biz_meta.conf
new file mode 100644
index 00000000..3e1f5f65
--- /dev/null
+++ b/API_SERVICE/meta_service/resources/mapping/logstash/biz_meta.conf
@@ -0,0 +1,43 @@
+input {
+ jdbc {
+ jdbc_driver_library => "${LOGSTASH_JDBC_DRIVER_JAR_LOCATION}"
+ jdbc_driver_class => "${LOGSTASH_JDBC_DRIVER}"
+ jdbc_connection_string => "${LOGSTASH_JDBC_URL}"
+ jdbc_user => "${LOGSTASH_JDBC_USERNAME}"
+ jdbc_password => "${LOGSTASH_JDBC_PASSWORD}"
+ schedule => "*/5 * * * * *"
+ statement => "
+ SELECT
+ *
+ FROM
+ meta.v_biz_meta_info
+ WHERE
+ status = 'D'
+ AND
+ modified_dt > CURRENT_TIMESTAMP AT TIME ZONE 'Asia/Seoul' - INTERVAL '10 second';"
+
+ tags => ["data_input"]
+ }
+}
+
+filter {
+ date {
+ match => ["updt_dt", "yyyy-MM-dd HH:mm:ssSSS", "yyyy-MM-dd","yyyy-MM-dd HH:mm:ss.S","yyyy-MM-dd HH:mm:ss","yyyy-MM-dd HH:mm:ss.SSSSSS"]
+ timezone => "Asia/Seoul"
+ locale => "ko"
+ target => "updt_dt"
+ }
+}
+
+
+output {
+ if "data_input" in [tags] {
+ elasticsearch {
+ hosts => ["${LOGSTASH_ELASTICSEARCH_HOST}"]
+ index => "biz_meta"
+ document_id => "%{biz_dataset_id}"
+ doc_as_upsert => true
+ }
+ stdout { codec => json_lines }
+ }
+}
\ No newline at end of file
diff --git a/API_SERVICE/meta_service/resources/mapping/logstash/postgresql-42.7.0.jar b/API_SERVICE/meta_service/resources/mapping/logstash/postgresql-42.7.0.jar
new file mode 100644
index 00000000..87af5796
Binary files /dev/null and b/API_SERVICE/meta_service/resources/mapping/logstash/postgresql-42.7.0.jar differ
diff --git a/API_SERVICE/meta_service/resources/mapping/logstash/v_biz_meta_oversea_els.conf b/API_SERVICE/meta_service/resources/mapping/logstash/v_biz_meta_oversea_els.conf
new file mode 100644
index 00000000..623060dd
--- /dev/null
+++ b/API_SERVICE/meta_service/resources/mapping/logstash/v_biz_meta_oversea_els.conf
@@ -0,0 +1,41 @@
+input {
+ jdbc {
+ jdbc_driver_library => "${LOGSTASH_JDBC_DRIVER_JAR_LOCATION}"
+ jdbc_driver_class => "${LOGSTASH_JDBC_DRIVER}"
+ jdbc_connection_string => "${LOGSTASH_JDBC_URL}"
+ jdbc_user => "${LOGSTASH_JDBC_USERNAME}"
+ jdbc_password => "${LOGSTASH_JDBC_PASSWORD}"
+ schedule => "*/5 * * * * *"
+ statement => "
+ SELECT
+ *
+ FROM
+ meta.v_biz_meta_oversea_els
+ WHERE
+ modified_dt > CURRENT_TIMESTAMP AT TIME ZONE 'Asia/Seoul' - INTERVAL '10 second';"
+
+ tags => ["data_input"]
+ }
+}
+
+filter {
+ date {
+ match => ["updt_dt", "yyyy-MM-dd HH:mm:ssSSS", "yyyy-MM-dd","yyyy-MM-dd HH:mm:ss.S","yyyy-MM-dd HH:mm:ss","yyyy-MM-dd HH:mm:ss.SSSSSS"]
+ timezone => "Asia/Seoul"
+ locale => "ko"
+ target => "updt_dt"
+ }
+}
+
+
+output {
+ if "data_input" in [tags] {
+ elasticsearch {
+ hosts => ["${LOGSTASH_ELASTICSEARCH_HOST}"]
+ index => "v_biz_meta_oversea_els"
+ document_id => "%{biz_dataset_id}"
+ doc_as_upsert => true
+ }
+ stdout { codec => json_lines }
+ }
+}
\ No newline at end of file
diff --git a/API_SERVICE/meta_service/routes/v1/autocomplete.py b/API_SERVICE/meta_service/routes/v1/autocomplete.py
deleted file mode 100644
index 6ef18227..00000000
--- a/API_SERVICE/meta_service/routes/v1/autocomplete.py
+++ /dev/null
@@ -1,71 +0,0 @@
-import logging
-
-from fastapi import APIRouter
-
-from meta_service.database.conn import db
-from libs.database.connector import Connector
-
-from meta_service.ELKSearch.config import dev_server
-from meta_service.ELKSearch.model import CoreOption
-from meta_service.ELKSearch.Utils.base import make_format
-from meta_service.ELKSearch.Utils.document_utils import search_filter
-
-from meta_service.common.search import default_search_set, base_query
-
-
-from pydantic import BaseModel
-
-
-class Prefix(BaseModel):
- index: str
- size: int
- col_nm: str
- keyword: str
-
-
-router = APIRouter()
-
-logger = logging.getLogger()
-
-
-@router.post("/autocomplete", response_model=dict)
-def autocomplete(input: CoreOption, index: str, size: int):
- #input.field는 list 형식으로 받아야함, keywords는 string으로 받아야함
- try:
-
- docmanager = default_search_set(dev_server, index, size)
- if len(input.field) > 1:
- # multi field 일 때
- logger.info("multi_match")
- prefix_query = base_query(1, [input])
-
- for query_dict in prefix_query:
- query_dict["multi_match"].pop("operator",None)
-
- body = make_format("query","bool",{"must": prefix_query})
- docmanager.set_body(body)
- prefix_dict = search_filter(docmanager.find(input.field))
-
- if not len(prefix_dict):
- return {"result": 1,"data": []}
-
- prefix_data = [ word for data in prefix_dict for word in data.values() if input.keywords[0] in word]
- else:
- # 단일 field 일 때
- logger.info("prefix")
- field = input.field[0]
- query = {field: input.keywords}
- prefix_data = search_filter(docmanager.prefix(body=query, source=input.field))
-
- if not len(prefix_data):
- return {"result": 1,"data": []}
-
- prefix_data = [data[input.field[0]] for data in prefix_data]
-
- result = {"result": 1,"data": prefix_data}
-
- except Exception as e:
- result = {"result": 0, "errorMessage": str(e)}
- logger.error(e, exc_info=True)
-
- return result
diff --git a/API_SERVICE/meta_service/routes/v1/els_data_search.py b/API_SERVICE/meta_service/routes/v1/els_data_search.py
deleted file mode 100644
index 4d6ef476..00000000
--- a/API_SERVICE/meta_service/routes/v1/els_data_search.py
+++ /dev/null
@@ -1,52 +0,0 @@
-import logging
-
-from fastapi import APIRouter, Depends
-
-from meta_service.database.conn import db
-from libs.database.connector import Connector
-
-from meta_service.ELKSearch.config import dev_server
-from meta_service.ELKSearch.model import InputModel
-from meta_service.ELKSearch.Utils.base import make_format
-from meta_service.ELKSearch.Utils.document_utils import search_filter
-
-from meta_service.common.search import default_search_set, base_query
-
-
-router = APIRouter()
-
-logger = logging.getLogger()
-
-@router.post("/search")
-def search(input: InputModel, session: Connector = Depends(db.get_db)):
- """
-
- :param input:
- :return:
- """
- try:
- len_search = len(input.searchOption)
- len_filter = len(input.filterOption)
-
- # from_ 0 부터 시작해야함, web에서는 1부터 넘어오기 때문에 1을 빼준다
- docmanager = default_search_set(dev_server, input.index, input.size, input.from_ - 1)
-
- # query에 조건이 없으면 match all 실행
- if not any([len_filter,len_search]):
- body = make_format("query","match_all",dict())
- else:
- search_query = base_query(len_search, input.searchOption)
- filter_query = base_query(len_filter, input.filterOption)
- body = make_format("query","bool", {"must": search_query,"filter": filter_query})
-
- docmanager.set_body(body)
- data = {
- "header": session.get_column_info(input.index.upper()),
- "count": docmanager.count(body),
- "body": search_filter(docmanager.find(input.resultField))
- }
- result = {"result": 1, "data": data}
- except Exception as e:
- result = {"result": 0, "errorMessage": str(e)}
- logger.error(e, exc_info=True)
- return result
diff --git a/API_SERVICE/meta_service/routes/v1/els_update.py b/API_SERVICE/meta_service/routes/v1/els_update.py
deleted file mode 100644
index 4550d8b7..00000000
--- a/API_SERVICE/meta_service/routes/v1/els_update.py
+++ /dev/null
@@ -1,56 +0,0 @@
-import logging
-import decimal
-
-from fastapi import Depends, APIRouter
-
-from meta_service.database.conn import db
-from libs.database.connector import Connector
-
-from meta_service.ELKSearch.config import dev_server
-from meta_service.common.search import default_search_set
-
-
-router = APIRouter()
-
-logger = logging.getLogger()
-
-
-@router.post("/els-update", response_model=dict)
-def els_update(table_name: str, session: Connector = Depends(db.get_db)):
- index = table_name.lower()
-
- data_query = "SELECT {0} FROM {1};"
- col_query = f"SELECT column_name FROM all_tab_columns WHERE table_name = '{table_name}';"
- try:
- cur = session.conn.cursor()
- cur.execute(col_query)
- # columns ['IDX', 'CONM', 'DFNMTRAIDX', 'BZRGSTCD', 'PRCCTCD',
- # 'COCTIDX', 'PSNIDX', 'PSNNM', 'PHONN', 'OFFRDT', 'ENDDT',
- # 'MDFCDT', 'IFOFFRYN', 'SALPRC', 'SYSTDVL', 'CORETC', 'NRTOFFR', 'COCT', 'DFNMTRA']
- columns = [col_nm[0] for col_nm in cur.fetchall()]
-
- data_query = data_query.format(",".join(columns), table_name)
- cur.execute(data_query)
-
- docmanager = default_search_set(dev_server, index)
-
- # insert_dataset = []
- for row in cur.fetchall():
- insert_body = dict()
- for i in range(0,len(columns)):
- col = columns[i].lower()
- if type(row[i]) == decimal.Decimal:
- insert_body[col] = int(row[i])
- else:
- insert_body[col] = row[i]
-
- docmanager.set_body(insert_body)
- logger.info(docmanager.insert(insert_body["idx"]))
- # insert_dataset.append(insert_body)
- # logger.info(len(insert_dataset))
- result = {"result":1,"data": "test"}
-
- except Exception as e:
- result = {"result": 0, "errorMessage": str(e)}
- logger.error(e, exc_info=True)
- return result
\ No newline at end of file
diff --git a/API_SERVICE/meta_service/routes/v2/autocomplete.py b/API_SERVICE/meta_service/routes/v2/autocomplete.py
deleted file mode 100644
index ce74ac72..00000000
--- a/API_SERVICE/meta_service/routes/v2/autocomplete.py
+++ /dev/null
@@ -1,85 +0,0 @@
-import logging
-
-from fastapi import APIRouter
-
-from meta_service.database.conn import db
-from libs.database.connector import Connector
-
-from meta_service.ELKSearch.config import dev_server
-from meta_service.ELKSearch.model import CoreOption
-from meta_service.ELKSearch.Utils.base import make_format
-from meta_service.ELKSearch.Utils.document_utils import search_filter
-
-from meta_service.common.search import default_search_set, base_query
-
-
-from pydantic import BaseModel
-
-
-class Prefix(BaseModel):
- index: str
- size: int
- fields: list
- query: str
-
-
-router = APIRouter()
-
-logger = logging.getLogger()
-
-
-@router.post("/autocomplete", response_model=dict)
-def autocomplete(input: Prefix):
- """
- :param input:
- {
- "index": "index_name",
- "size": 5,
- "fields": [
- "col1", "col2"
- ],
- "query": "search keyword"
- }
- :return:
- {
- "result": 1,
- "data": ["data1","data2"..."data5"]
- }
- """
- try:
- keyword = input.query
- docmanager = default_search_set(dev_server, input.index, input.size)
- input.query = f"(*{input.query}*)"
- del input.index
- del input.size
-
- # body = make_format("query","query_string",input.dict())
- search_query = {"query_string": input.dict()}
-
- body = {
- "query": {
- "bool": {
- "must": [search_query]
- }
- }
- }
- if docmanager.index == "vw_co_if":
- filter_query = make_format("term","showyn","y")
- body["query"]["bool"]["filter"] = [filter_query]
-
-
- logger.info(body)
- docmanager.set_body(body)
- prefix_dict = search_filter(docmanager.find(input.fields))
-
- if not len(prefix_dict):
- return {"result": 1,"data": []}
-
- prefix_data = [ word for data in prefix_dict for word in data.values() if keyword in word]
- # 데이터셋에서 해당 되는 데이터가 여러개 있을 수 있어 prefix_data에 size를 줌
- result = {"result": 1,"data": prefix_data[:docmanager.size]}
- except Exception as e:
- result = {"result": 0, "errorMessage": str(e)}
- logger.error(e, exc_info=True)
-
- return result
diff --git a/API_SERVICE/meta_service/routes/v2/els_bulk_update.py b/API_SERVICE/meta_service/routes/v2/els_bulk_update.py
deleted file mode 100644
index 4e8ab13e..00000000
--- a/API_SERVICE/meta_service/routes/v2/els_bulk_update.py
+++ /dev/null
@@ -1,51 +0,0 @@
-import logging
-import decimal
-
-from fastapi import Depends, APIRouter
-
-from meta_service.database.conn import db
-from libs.database.connector import Connector
-from meta_service.common.config import settings
-
-from meta_service.ELKSearch.config import dev_server
-from meta_service.common.search import default_search_set, exception_col
-
-
-router = APIRouter()
-
-logger = logging.getLogger()
-
-
-@router.post("/bulk_update", response_model=dict)
-def els_update(index: str, session: Connector = Depends(db.get_db)):
-
- # data_query = "SELECT {0} FROM {1};"
- data_query = {"table_nm": index}
-
- try:
- cur = session.conn.cursor()
- column_dict = session.get_column_info(index, settings.DB_INFO.SCHEMA)
- columns = [col["column_name"] for col in column_dict]
- rows = session.query(**data_query).all()[0]
-
- docmanager = default_search_set(dev_server, index)
-
- insert_dataset = []
- for row in rows:
- insert_body = dict()
- for i in range(0,len(columns)):
- if type(row[columns[i]]) ==decimal.Decimal:
- insert_body[columns[i]] = int(row[columns[i]])
- else:
- insert_body[columns[i]] = row[columns[i]]
-
- insert_body = exception_col(index,insert_body)
- docmanager.set_body(insert_body)
- logger.info(docmanager.insert(insert_body["idx"]))
- result = {"result":1,"data": "test"}
-
- except Exception as e:
- result = {"result": 0, "errorMessage": str(e)}
- logger.error(e, exc_info=True)
- return result
-
diff --git a/API_SERVICE/meta_service/routes/v2/els_data_search.py b/API_SERVICE/meta_service/routes/v2/els_data_search.py
deleted file mode 100644
index 55480852..00000000
--- a/API_SERVICE/meta_service/routes/v2/els_data_search.py
+++ /dev/null
@@ -1,94 +0,0 @@
-import logging
-
-from fastapi import APIRouter, Depends
-
-from meta_service.database.conn import db
-from libs.database.connector import Connector
-from meta_service.common.config import settings
-
-from meta_service.ELKSearch.config import dev_server
-from meta_service.ELKSearch.model import InputModel
-from meta_service.ELKSearch.Utils.base import make_format
-from meta_service.ELKSearch.Utils.document_utils import search_filter
-
-from meta_service.common.search import default_search_set, base_query
-
-
-router = APIRouter()
-
-logger = logging.getLogger()
-
-@router.post("/search")
-def search(input: InputModel, session: Connector = Depends(db.get_db)):
- """
- :param input:
- {
- "index": "index_name",
- "from": 0, # page
- "size": 10, # result size
- "resultField": ["col1", "col2"],
- "sortOption": [{"col": "desc"}],
- "searchOption": [
- {
- "field": ["conm"],
- "operator": "OR",
- "keywords": ["기업명"]
- }
- ],
- "filterOption": []
- }
- :return:
- {
- "result": 1,
- "data": {
- "header": {"column_name": "col1", "kor_column_name": "컬럼명1"},
- "count": "10", # total count
- "body": [{data set 1}, {data set 2} ... {data set 10}]
- }
- }
- """
- try:
- len_search = len(input.searchOption)
- len_filter = len(input.filterOption)
- len_range = len(input.rangeOption)
-
- # from_ 0 부터 시작해야함, web에서는 1부터 넘어오기 때문에 1을 빼준다
- docmanager = default_search_set(dev_server, input.index, input.size, input.from_ - 1)
-
- # query에 조건이 없으면 match all 실행
- if not any([len_filter, len_search, len_range]):
- body = make_format("query","match_all",dict())
- else:
- search_format = "(*{0}*)"
- search_query = []
- for query in input.searchOption:
- keywords = [search_format.format(word) for keyword in query.keywords for word in keyword.split(" ")]
- if len(keywords) > 1:
- keywords = f" {query.operator.upper()} ".join(keywords)
- else:
- keywords = keywords[0]
- search_query.append({"query_string": {"query": keywords ,"fields": query.field}})
- logger.info(search_query)
-
- # search_query = base_query(len_search, input.searchOption)
- filter_query = base_query(len_filter, input.filterOption)
-
- # range option
- for query in input.rangeOption:
- filter_query.append(make_format("range",query.field,query.compare_dict))
- logger.info(filter_query)
-
- body = make_format("query","bool", {"must": search_query,"filter": filter_query})
- logger.info(body)
-
- docmanager.set_body(body)
- data = {
- "header": session.get_column_info(input.index, settings.DB_INFO.SCHEMA),
- "count": docmanager.count(body),
- "body": search_filter(docmanager.find(input.resultField))
- }
- result = {"result": 1, "data": data}
- except Exception as e:
- result = {"result": 0, "errorMessage": str(e)}
- logger.error(e, exc_info=True)
- return result
diff --git a/API_SERVICE/meta_service/routes/v2/els_index_create.py b/API_SERVICE/meta_service/routes/v2/els_index_create.py
deleted file mode 100644
index 39a6027b..00000000
--- a/API_SERVICE/meta_service/routes/v2/els_index_create.py
+++ /dev/null
@@ -1,29 +0,0 @@
-import logging
-import decimal
-
-from fastapi import Depends, APIRouter
-
-from meta_service.ELKSearch.config import dev_server
-from meta_service.ELKSearch.Utils.base import set_els
-from meta_service.ELKSearch.index import Index
-
-router = APIRouter()
-
-logger = logging.getLogger()
-
-
-@router.post("/els-index-create", response_model=dict)
-def els_update(index: str):
- try:
- es = set_els(dev_server)
- ind_manager = Index(es)
- indices = ind_manager.all_index().keys()
- if index not in indices:
- logger.info(ind_manager.create(index))
- result = {"result": 1,"data": "success"}
-
- except Exception as e:
- result = {"result": 0, "errorMessage": str(e)}
- logger.error(e, exc_info=True)
- return result
-
diff --git a/API_SERVICE/meta_service/routes/v2/els_upsert.py b/API_SERVICE/meta_service/routes/v2/els_upsert.py
deleted file mode 100644
index d00e7a2b..00000000
--- a/API_SERVICE/meta_service/routes/v2/els_upsert.py
+++ /dev/null
@@ -1,59 +0,0 @@
-import logging
-import decimal
-
-from fastapi import Depends, APIRouter
-
-from meta_service.database.conn import db
-from libs.database.connector import Connector
-from meta_service.common.config import settings
-
-from meta_service.ELKSearch.config import dev_server
-from meta_service.common.search import default_search_set, Upsert, exception_col
-
-
-router = APIRouter()
-
-logger = logging.getLogger()
-
-
-@router.post("/els-upsert", response_model=dict)
-def els_update(input: Upsert, session: Connector = Depends(db.get_db)):
-
- data_query = {
- "table_nm": input.index,
- "where_info": [{
- "table_nm": input.index,
- "key": input.key,
- "value": input.ids,
- "compare_op": "in",
- "op": ""
- }]
- }
- logger.info(data_query)
- try:
- cur = session.conn.cursor()
- column_dict = session.get_column_info(input.index, settings.DB_INFO.SCHEMA)
- columns = [col["column_name"] for col in column_dict]
-
- rows = session.query(**data_query).all()[0]
- docmanager = default_search_set(dev_server, input.index)
-
- insert_dataset = []
- for row in rows:
- insert_body = dict()
- for i in range(0,len(columns)):
- if type(row[columns[i]]) ==decimal.Decimal:
- insert_body[columns[i]] = int(row[columns[i]])
- else:
- insert_body[columns[i]] = row[columns[i]]
-
- insert_body = exception_col(input.index,insert_body)
- docmanager.set_body(insert_body)
- doc_id = insert_body[input.key]
- logger.info(docmanager.update(doc_id))
- result = {"result":1,"data": "test"}
-
- except Exception as e:
- result = {"result": 0, "errorMessage": str(e)}
- logger.error(e, exc_info=True)
- return result
\ No newline at end of file
diff --git a/API_SERVICE/mydisk_service/app/__init__.py b/API_SERVICE/mydisk_service/app/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/API_SERVICE/mydisk_service/app/common/__init__.py b/API_SERVICE/mydisk_service/app/common/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/API_SERVICE/mydisk_service/app/common/config.py b/API_SERVICE/mydisk_service/app/common/config.py
new file mode 100644
index 00000000..a21da5da
--- /dev/null
+++ b/API_SERVICE/mydisk_service/app/common/config.py
@@ -0,0 +1,163 @@
+import logging.config
+import os
+from functools import lru_cache
+from typing import Optional
+from urllib.parse import quote
+
+from pydantic import BaseSettings, PostgresDsn
+
+base_dir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+print(f"mydisk base_dir :: {base_dir}")
+
+
+class DBInfo(BaseSettings):
+ DB_POOL_RECYCLE: int = 900
+ DB_ECHO: bool = True
+ DB_URL: str
+
+
+class PGInfo(DBInfo):
+ SCHEMA: str
+
+ class Config:
+ env_file = f"{base_dir}/.env"
+ env_file_encoding = "utf-8"
+
+
+class KeycloakInfo(BaseSettings):
+ keycloak_url: Optional[str]
+ admin_username: Optional[str]
+ admin_password: Optional[str]
+ realm: Optional[str]
+ client_id: Optional[str]
+ client_secret: Optional[str]
+
+ class Config:
+ env_file = f"{base_dir}/.env"
+ env_file_encoding = "utf-8"
+
+
+class MydiskInfo(BaseSettings):
+ ROOT_DIR: str
+
+ mydisk_url: Optional[str]
+ admin_username: Optional[str]
+ admin_password: Optional[str]
+ scope: Optional[str]
+ client_id: Optional[str]
+ client_secret: Optional[str]
+
+ class Config:
+ env_file = f"{base_dir}/.env"
+ env_file_encoding = "utf-8"
+
+
+class Settings(BaseSettings):
+ BASE_DIR = base_dir
+ RELOAD: bool
+ TESTING: bool
+
+ DB_INFO: DBInfo
+ KEYCLOAK_INFO: KeycloakInfo
+ MYDISK_INFO: MydiskInfo
+
+ S3_URL: str
+ S3KEY: str
+ S3SECRET: str
+
+
+class ProdSettings(Settings):
+ TESTING: bool = False
+ RELOAD: bool = False
+
+ DB_INFO: PGInfo = PGInfo()
+ KEYCLOAK_INFO: KeycloakInfo = KeycloakInfo()
+ MYDISK_INFO: MydiskInfo = MydiskInfo()
+
+ class Config:
+ env_file = f"{base_dir}/.env"
+ env_file_encoding = "utf-8"
+
+
+class LocalSettings(Settings):
+ TESTING: bool = False
+ RELOAD: bool = False
+
+ DB_INFO = PGInfo(
+ DB_POOL_RECYCLE=900,
+ DB_ECHO=False,
+ SCHEMA="sitemng,users,meta,iag,ckan,board,analysis",
+ DB_URL=str(
+ PostgresDsn.build(
+ scheme="postgresql",
+ host="localhost",
+ port="5432",
+ user="dpmanager",
+ password=quote("hello.dp12#$", safe=""),
+ path="/dataportal",
+ )
+ ),
+ )
+
+ KEYCLOAK_INFO = KeycloakInfo(
+ keycloak_url="https://auth.bigdata-car.kr",
+ admin_username="admin",
+ admin_password="2021@katech",
+ realm="mobigen",
+ client_id="katech",
+ client_secret="pwLZG5EaWph1nJAOjwYJ32YGtXdAj5SL",
+ )
+
+ MYDISK_INFO = MydiskInfo(
+ ROOT_DIR="./",
+ mydisk_url="https://mydisk.bigdata-car.kr",
+ admin_username="superuser",
+ admin_password="35ldxxhbd1",
+ scope="download",
+ client_id="86e9aaff5afc7d7828035500e11cb48c",
+ client_secret="lfb5RQK9SH3GcRqGgq0QcLlW5mJf0JDBNkrn1729",
+ )
+
+ S3_URL: str = "http://10.10.30.51:8085"
+ S3KEY = ""
+ S3SECRET = ""
+
+
+@lru_cache
+def get_settings() -> Settings:
+ env = os.getenv("APP_ENV", "prod")
+ print(env)
+ return {"local": LocalSettings, "prod": ProdSettings}[env]()
+
+
+settings = get_settings()
+print(settings)
+
+log_config = {
+ "version": 1,
+ "disable_existing_loggers": False,
+ "formatters": {
+ "standard": {"format": "%(asctime)s %(levelname)s [%(filename)s:%(lineno)d] - %(message)s"},
+ },
+ "handlers": {
+ "console_handler": {
+ "class": "logging.StreamHandler",
+ "level": "DEBUG",
+ "formatter": "standard",
+ },
+ },
+ "root": {"level": "DEBUG", "handlers": ["console_handler"], "propagate": False},
+}
+
+if "prod" == os.getenv("APP_ENV", "prod"):
+ log_config["handlers"]["file_handler"] = {
+ "class": "logging.handlers.RotatingFileHandler",
+ "filename": os.path.join(base_dir, "log", "mydisk.log"),
+ "mode": "a",
+ "maxBytes": 20000000,
+ "backupCount": 10,
+ "level": "INFO",
+ "formatter": "standard",
+ }
+ log_config["root"]["handlers"].append("file_handler")
+logging.config.dictConfig(log_config)
diff --git a/API_SERVICE/mydisk_service/app/common/const.py b/API_SERVICE/mydisk_service/app/common/const.py
new file mode 100644
index 00000000..e69de29b
diff --git a/API_SERVICE/mydisk_service/app/database/__init__.py b/API_SERVICE/mydisk_service/app/database/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/API_SERVICE/mydisk_service/app/database/conn.py b/API_SERVICE/mydisk_service/app/database/conn.py
new file mode 100644
index 00000000..b39f27a1
--- /dev/null
+++ b/API_SERVICE/mydisk_service/app/database/conn.py
@@ -0,0 +1,6 @@
+from sqlalchemy.ext.automap import automap_base
+
+from libs.database.orm import SQLAlchemyConnector
+
+Base = automap_base()
+db = SQLAlchemyConnector(Base)
diff --git a/API_SERVICE/mydisk_service/app/main.py b/API_SERVICE/mydisk_service/app/main.py
new file mode 100644
index 00000000..3c85939c
--- /dev/null
+++ b/API_SERVICE/mydisk_service/app/main.py
@@ -0,0 +1,32 @@
+import logging
+
+import uvicorn
+from fastapi import FastAPI
+
+from libs.auth.keycloak import keycloak
+from libs.disk.mydisk import mydisk
+from mydisk_service.app.common.config import settings
+from mydisk_service.app.database.conn import db
+from mydisk_service.app.routes.v1 import disk, s3
+
+logger = logging.getLogger()
+
+
+def create_app():
+ app_ = FastAPI()
+ print(settings.dict())
+ db.init_app(app_, **settings.dict())
+ keycloak.set_url(settings.KEYCLOAK_INFO.keycloak_url)
+ mydisk.set_url(settings.MYDISK_INFO.mydisk_url)
+
+ app_.include_router(disk.router, prefix="/portal/api/mydisk")
+ app_.include_router(s3.router, prefix="/portal/api/s3")
+
+ return app_
+
+
+app = create_app()
+
+
+if __name__ == "__main__":
+ uvicorn.run("main:app", host="0.0.0.0", port=8090, reload=True)
diff --git a/API_SERVICE/mydisk_service/app/routes/__init__.py b/API_SERVICE/mydisk_service/app/routes/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/API_SERVICE/mydisk_service/app/routes/v1/__init__.py b/API_SERVICE/mydisk_service/app/routes/v1/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/API_SERVICE/mydisk_service/app/routes/v1/disk.py b/API_SERVICE/mydisk_service/app/routes/v1/disk.py
new file mode 100644
index 00000000..b3e1071c
--- /dev/null
+++ b/API_SERVICE/mydisk_service/app/routes/v1/disk.py
@@ -0,0 +1,320 @@
+import base64
+import glob
+import logging
+import os
+import shutil
+import zipfile
+from io import BytesIO
+from pathlib import Path
+from typing import Optional, Union, Dict
+
+import pandas as pd
+from PIL import Image
+from fastapi import APIRouter
+from pydantic import BaseModel
+from starlette.responses import FileResponse
+
+from libs.disk.mydisk import mydisk
+from mydisk_service.app.common.config import settings
+
+logger = logging.getLogger()
+
+
+class UserParams(BaseModel):
+ uuid: str
+
+ def get_path(self) -> Path:
+ return Path(
+ os.path.join(
+ settings.MYDISK_INFO.ROOT_DIR,
+ "USER",
+ self.uuid,
+ )
+ )
+
+
+class DownloadParams(BaseModel):
+ src_target_path: str
+
+ def get_path(self) -> Path:
+ return Path(
+ os.path.join(
+ settings.MYDISK_INFO.ROOT_DIR,
+ self.src_target_path.lstrip("/") if self.src_target_path.startswith("/") else self.src_target_path,
+ )
+ )
+
+
+class CopyParams(BaseModel):
+ src_path: str
+ force: Union[bool, str]
+ dst_path: str
+
+ def is_force(self):
+ if isinstance(self.force, bool):
+ return self.force
+ elif self.force in ("yes", "true", "t", "y", "1"):
+ return True
+ return False
+
+ def get_src(self):
+ return os.path.join(
+ settings.MYDISK_INFO.ROOT_DIR, self.src_path.lstrip("/") if self.src_path.startswith("/") else self.src_path
+ )
+
+ def get_dst(self):
+ return os.path.join(
+ settings.MYDISK_INFO.ROOT_DIR, self.dst_path.lstrip("/") if self.dst_path.startswith("/") else self.dst_path
+ )
+
+
+class TreeParams(BaseModel):
+ target_directory: str
+
+ def get_path(self) -> Path:
+ print(f"target path :: {settings.MYDISK_INFO.ROOT_DIR}")
+ return Path(
+ os.path.join(
+ settings.MYDISK_INFO.ROOT_DIR,
+ self.target_directory.lstrip("/") if self.target_directory.startswith("/") else self.target_directory,
+ )
+ )
+
+
+class LabelParams(BaseModel):
+ data_set_id: str
+
+ def get_path(self) -> Path:
+ return Path(os.path.join(settings.MYDISK_INFO.ROOT_DIR, "ADMIN", self.data_set_id, "LABEL_DATA"))
+
+
+class PreviewParam(BaseModel):
+ target_file_directory: str
+ width: Optional[int] = 90
+ height: Optional[int] = 90
+ rows: int
+
+ def get_path(self) -> Path:
+ return Path(
+ os.path.join(
+ settings.MYDISK_INFO.ROOT_DIR,
+ self.target_file_directory.lstrip("/")
+ if self.target_file_directory.startswith("/")
+ else self.target_file_directory,
+ )
+ )
+
+
+router = APIRouter()
+
+
+@router.post("/v1/preview")
+async def head(params: PreviewParam):
+ try:
+ path = params.get_path()
+ width = params.width
+ height = params.height
+ suffix = path.suffix[1:].lower()
+ lines = params.rows
+ logger.info(f"path :: {path}")
+ file_type = "txt"
+
+ if suffix in ["jpg", "jpeg", "png", "gif", "tiff", "tif", "bmp"]:
+ file_type = "image"
+ byte_str = BytesIO()
+ thumb_image = Image.open(path)
+ thumb_image.thumbnail((width, height))
+ thumb_image.save(byte_str, format="png")
+ image_base64str = base64.b64encode(byte_str.getvalue())
+ logger.info(f"image str :: {image_base64str[:30]}...")
+ contents = image_base64str
+ else: # txt, csv
+ df = pd.read_excel(path, header=None) if suffix in ["xls", "xlsx"] else pd.read_csv(path, header=None)
+ df = df.fillna("")
+ contents = df[:lines].values.tolist()
+
+ result = {"result": 1, "errorMessage": "", "data": {"body": contents}, "type": file_type}
+ except Exception as e:
+ result = {"result": 0, "errorMessage": str(e)}
+ return result
+
+
+@router.post("/v1/listdir")
+async def walk(param: TreeParams) -> Dict:
+ id = 0
+
+ def nodes(p: Path):
+ nonlocal id
+ lst = []
+ for i in p.iterdir():
+ id += 1
+ data = {"text": i.name, "id": id, "type": "file"}
+ if i.is_dir():
+ node = nodes(i)
+ if node:
+ data["nodes"] = node
+ data["type"] = "directory"
+
+ lst.append(data)
+ return lst
+
+ try:
+ result = {"result": 1, "errorMessage": "", "data": {"body": nodes(param.get_path())}}
+ except FileNotFoundError as fe:
+ result = {"result": 0, "errorMessage": str(fe), "data": []}
+ except Exception as e:
+ result = {"result": 0, "errorMessage": str(e), "data": []}
+ return result
+
+
+@router.post("/v1/link")
+async def hardlink(params: CopyParams):
+ try:
+ run(params.get_src(), params.get_dst(), params.is_force(), False)
+ result = {"result": 1, "errorMessage": "", "data": {"body": 200}}
+ except Exception as e:
+ result = {"result": 0, "errorMessage": str(e)}
+ return result
+
+
+@router.post("/v1/copy")
+async def copy(params: CopyParams):
+ try:
+ run(params.get_src(), params.get_dst(), params.is_force(), True)
+ result = {"result": 1, "errorMessage": "", "data": {"body": 200}}
+ except Exception as e:
+ result = {"result": 0, "errorMessage": str(e)}
+ return result
+
+
+@router.post("/v1/download")
+async def download(params: DownloadParams):
+ src_path = params.get_path()
+ logger.info(f"param src_path :: {src_path}")
+ try:
+ # dir 이면 zip 으로 압축함
+ if os.path.isdir(src_path):
+ os.chdir(src_path) # 압축 파일 생성할 폴더로 working directory 를 이동시킨다
+ byte_io = BytesIO()
+
+ zip_file = zipfile.ZipFile(byte_io, "w")
+ for (path, dir, files) in os.walk(src_path):
+ for file in files:
+ logger.info(f"Adding file :: {file}")
+ # 상대경로를 활용하여 압축한다. (os.path.relpath)
+ zip_file.write(
+ os.path.join(os.path.relpath(path, src_path), file), compress_type=zipfile.ZIP_DEFLATED
+ )
+
+ zip_file.close()
+ # zip 파일을 읽도록 주소 변경
+ decode_data = base64.b64encode(byte_io.getvalue()).decode()
+ else:
+ read_file = open(src_path, "rb").read()
+ decode_data = base64.b64encode(read_file).decode()
+
+ logger.info(f"decode_data :: {decode_data[:20]} ..")
+ result = {"result": 1, "errorMessage": "", "data": {"body": decode_data}}
+ except Exception as e:
+ result = {"result": 0, "errorMessage": str(e)}
+ return result
+
+
+@router.post("/v1/user")
+async def create_user_dir(params: UserParams):
+ target_path = params.get_path()
+ logger.info(f"param target_path :: {target_path}")
+ dirs = ["favorite", "upload", "purchase"]
+ try:
+ # 세개의 디렉토리 미리 생성
+ for dir in dirs:
+ os.makedirs(f"{target_path}/{dir}")
+ result = {"result": 1, "errorMessage": "", "data": "success"}
+ except Exception as e:
+ logger.error(e)
+ result = {"result": 0, "errorMessage": str(e)}
+ return result
+
+
+@router.post("/v1/labelSearch")
+async def label(params: LabelParams):
+ def listFiles(p: Path):
+ ret = []
+ lst = []
+ for i in p.iterdir():
+ if i.is_dir():
+ listFile = listFiles(i)
+ if listFile:
+ lst = lst + listFile
+
+ imageDatas = sorted(glob.glob(rf"{i}/*[.jpg][.png][.gif]"))
+ imageCount = len(imageDatas)
+ if imageCount > 0:
+ folderName = f"{i}".split("/")[-2]
+ for f in imageDatas:
+ fileArr = f.split("/")
+ fileName = fileArr[-1].split(".")[0]
+ refUrl = "/".join(fileArr[fileArr.index("raw") + 1 :])
+ index = f"{int(fileName)}"
+ data = [folderName, refUrl, index]
+ ret.append(data)
+ lst = lst + ret
+ return lst
+
+ source_path = params.get_path()
+ logger.info(f"param source_path :: {source_path}")
+ try:
+ result = {"result": 1, "errorMessage": "", "data": {"body": listFiles(source_path)}}
+ except FileNotFoundError as fe:
+ result = {"result": 0, "errorMessage": str(fe), "data": []}
+ except Exception as e:
+ result = {"result": 0, "errorMessage": str(e), "data": []}
+ return result
+
+
+@router.get("/v1/linkImage/")
+async def linkImage(dataset_id: str, fileloc: str):
+ fileBase = "/home/deep/workspace/ysw/katech/filebrowser_datas/file_data/ADMIN/"
+ imageSrc = f"{fileBase}/{dataset_id}/LABEL_DATA/raw/{fileloc}"
+ try:
+ return FileResponse(imageSrc)
+ except Exception as e:
+ logger.info(str(e))
+
+
+def is_dir(src_path):
+ return os.path.isdir(
+ os.path.join(settings.MYDISK_INFO.ROOT_DIR, src_path.lstrip("/") if src_path.startswith("/") else src_path)
+ )
+
+
+def run(src_path, dst_path, is_force, is_copy):
+ os.makedirs(os.path.dirname(dst_path), exist_ok=True)
+
+ if os.path.exists(dst_path):
+ if is_force:
+ shutil.rmtree(dst_path)
+ else:
+ raise Exception("already exists")
+
+ if os.path.isfile(src_path):
+ os.link(src_path, dst_path)
+ else:
+ shutil.copytree(
+ src=src_path,
+ dst=dst_path,
+ dirs_exist_ok=is_force,
+ copy_function=os.link if not is_copy else shutil.copy2,
+ )
+
+
+async def get_admin_token() -> None:
+ res = await mydisk.generate_admin_token(
+ username=settings.MYDISK_INFO.admin_username,
+ password=settings.MYDISK_INFO.admin_password,
+ scope=settings.MYDISK_INFO.scope,
+ client_id=settings.MYDISK_INFO.client_id,
+ client_secret=settings.MYDISK_INFO.client_secret,
+ )
+
+ return res.get("data").get("access_token")
diff --git a/API_SERVICE/mydisk_service/app/routes/v1/s3.py b/API_SERVICE/mydisk_service/app/routes/v1/s3.py
new file mode 100644
index 00000000..0d171928
--- /dev/null
+++ b/API_SERVICE/mydisk_service/app/routes/v1/s3.py
@@ -0,0 +1,162 @@
+import json
+import logging
+import os
+from datetime import datetime
+from typing import Optional
+
+import boto3
+from botocore.exceptions import ClientError
+from fastapi import APIRouter, Depends
+from starlette.responses import JSONResponse
+
+from mydisk_service.app.common.config import settings
+
+router = APIRouter(prefix="/v1")
+logger = logging.getLogger()
+
+
+def convert_datetime_to_str(obj):
+ if isinstance(obj, datetime):
+ return obj.strftime("%a, %d %b %Y %H:%M:%S GMT")
+
+
+def get_s3_client():
+ s3 = boto3.client("s3", aws_access_key_id=settings.S3KEY, aws_secret_access_key=settings.S3SECRET, endpoint_url=settings.S3_URL)
+ try:
+ yield s3
+ finally:
+ s3.close()
+
+
+@router.get("/bucket/list")
+async def get_bucket_list(s3=Depends(get_s3_client)):
+ response = s3.list_buckets()
+ logger.debug(f"list bucket :: {response}")
+ buckets = json.dumps(response["Buckets"], default=convert_datetime_to_str)
+ logger.debug(buckets)
+ return JSONResponse(
+ status_code=200,
+ content={
+ "result": 1,
+ "errorMessage": "",
+ "data": {"body": buckets},
+ },
+ )
+
+
+@router.get("/bucket/info")
+async def bucket_info(bucket_name: str, s3=Depends(get_s3_client)):
+ try:
+ res = s3.head_bucket(Bucket=bucket_name)
+ return JSONResponse(status_code=200, content={"result": 1, "errorMessage": "", "data": {"body": res}})
+ except ClientError as e:
+ error_code = e.response["Error"]["Code"]
+ if error_code == "404":
+ logger.debug(f"Bucket {bucket_name} does not exist")
+ return JSONResponse(
+ status_code=404, content={"result": 0, "errorMessage": f"{bucket_name} not found"}
+ )
+ else:
+ logger.error(f"Error checking bucket existence: {e}")
+ return JSONResponse(status_code=500, content={"result": 0, "errorMessage": str(e)})
+
+
+@router.post("/bucket")
+async def create_bucket(bucket_name: str, s3=Depends(get_s3_client)):
+ try:
+ s3.create_bucket(Bucket=bucket_name)
+ logger.info(f"Bucket {bucket_name} created successfully")
+
+ return JSONResponse(status_code=201, content={"result": 1, "errorMessage": ""})
+ except ClientError as e:
+ logger.error(f"Error creating bucket: {e}")
+ return JSONResponse(status_code=500, content={"result": 0, "errorMessage": str(e)})
+
+
+@router.post("/bucket/del")
+async def delete_bucket(bucket_name: str, s3=Depends(get_s3_client)):
+ try:
+ s3.delete_bucket(Bucket=bucket_name)
+ logger.info(f"Bucket {bucket_name} deleted successfully")
+
+ return JSONResponse(status_code=200, content={"result": 1, "errorMessage": ""})
+ except ClientError as e:
+ logger.error(f"Error deleting bucket: {e}")
+ return JSONResponse(status_code=500, content={"result": 0, "errorMessage": str(e)})
+
+
+@router.get("/object/list")
+async def get_object_list(bucket_name: str, s3=Depends(get_s3_client)):
+ try:
+ response = s3.list_objects_v2(Bucket=bucket_name)
+
+ return JSONResponse(
+ status_code=200,
+ content={
+ "result": 1,
+ "errorMessage": "",
+ "data": {"body": json.dumps(response.get("Contents", []), default=convert_datetime_to_str)},
+ },
+ )
+ except ClientError as e:
+ logger.error(f"Error listing objects: {e}")
+ return JSONResponse(status_code=500, content={"result": 0, "errorMessage": str(e)})
+
+
+@router.get("/object/download")
+async def get_object(
+ bucket_name: str,
+ download_uuid: str,
+ object_path: str = "",
+ force: bool = False,
+ s3=Depends(get_s3_client),
+):
+ try:
+ download_base_dir = os.path.join(settings.MYDISK_INFO.ROOT_DIR, "ADMIN", download_uuid)
+ response = s3.list_objects_v2(Bucket=bucket_name, Prefix=object_path)
+ logger.debug(response.get("Contents", []))
+ for obj in response.get("Contents", []):
+ s3_key = obj["Key"]
+ download_path = os.path.join(download_base_dir, s3_key)
+ os.makedirs(os.path.dirname(download_path), exist_ok=True)
+ if force or not os.path.exists(download_path):
+ s3.download_file(bucket_name, s3_key, download_path)
+
+ logger.debug(f"{object_path} download to :: {download_base_dir}/{object_path}")
+
+ return JSONResponse(status_code=200, content={"result": 1, "errorMessage": ""})
+ except ClientError as e:
+ logger.error(f"Error getting object: {e}")
+ return JSONResponse(status_code=500, content={"result": 0, "errorMessage": str(e)})
+
+
+@router.post("/object")
+async def upload_object(bucket_name: str, object_path: Optional[str] = "", s3=Depends(get_s3_client)):
+ try:
+ s3.head_bucket(Bucket=bucket_name)
+ except ClientError:
+ s3.create_bucket(Bucket=bucket_name)
+ logger.info(f"Bucket {bucket_name} created successfully (with upload)")
+
+ try:
+ local_base_dir = os.path.join(settings.MYDISK_INFO.ROOT_DIR, "ADMIN", bucket_name)
+ object_full_path = os.path.join(local_base_dir, object_path)
+ logger.debug(object_full_path)
+ if os.path.isdir(object_full_path):
+ for root, dirs, files in os.walk(object_full_path):
+ for file in files:
+ local_path = os.path.join(root, file)
+ s3_path = os.path.relpath(local_path, local_base_dir)
+ await upload_file_one(bucket_name=bucket_name, local_path=local_path, s3_path=s3_path, s3=s3)
+ logger.debug(f"Object {local_path} uploaded to {bucket_name}/{s3_path} successfully")
+ else:
+ await upload_file_one(bucket_name, object_full_path, object_path, s3)
+ logger.debug(f"upload one {object_full_path}/{object_path}")
+ return JSONResponse(status_code=200, content={"result": 1, "errorMessage": ""})
+ except ClientError as e:
+ print(f"Error uploading object: {e}")
+
+
+async def upload_file_one(bucket_name, local_path, s3_path, s3):
+ with open(local_path, "rb") as data:
+ s3.upload_fileobj(data, bucket_name, s3_path)
diff --git a/API_SERVICE/mydisk_service/gunicorn.conf.py b/API_SERVICE/mydisk_service/gunicorn.conf.py
new file mode 100644
index 00000000..dfcf6dc5
--- /dev/null
+++ b/API_SERVICE/mydisk_service/gunicorn.conf.py
@@ -0,0 +1,246 @@
+# Gunicorn configuration file.
+#
+# Server socket
+#
+# bind - The socket to bind.
+#
+# A string of the form: 'HOST', 'HOST:PORT', 'unix:PATH'.
+# An IP is a valid HOST.
+#
+# backlog - The number of pending connections. This refers
+# to the number of clients that can be waiting to be
+# served. Exceeding this number results in the client
+# getting an error when attempting to connect. It should
+# only affect servers under significant load.
+#
+# Must be a positive integer. Generally set in the 64-2048
+# range.
+#
+import os
+
# NOTE(review): gunicorn.sh overrides this with --bind 0.0.0.0:22000 — confirm
# which port is intended.
bind = "0.0.0.0:8000"  # listen on all interfaces, port 8000
backlog = 2048  # pending-connection queue length (typical range 64-2048)
+
+#
+# Worker processes
+#
+# workers - The number of worker processes that this server
+# should keep alive for handling requests.
+#
+# A positive integer generally in the 2-4 x $(NUM_CORES)
+# range. You'll want to vary this a bit to find the best
+# for your particular application's work load.
+#
+# worker_class - The type of workers to use. The default
+# sync class should handle most 'normal' types of work
+# loads. You'll want to read
+# http://docs.gunicorn.org/en/latest/design.html#choosing-a-worker-type
+# for information on when you might want to choose one
+# of the other worker classes.
+#
+# A string referring to a Python path to a subclass of
+# gunicorn.workers.base.Worker. The default provided values
+# can be seen at
+# http://docs.gunicorn.org/en/latest/settings.html#worker-class
+#
+# worker_connections - For the eventlet and gevent worker classes
+# this limits the maximum number of simultaneous clients that
+# a single process can handle.
+#
+# A positive integer generally set to around 1000.
+#
+# timeout - If a worker does not notify the master process in this
+# number of seconds it is killed and a new worker is spawned
+# to replace it.
+#
+# Generally set to thirty seconds. Only set this noticeably
+# higher if you're sure of the repercussions for sync workers.
+# For the non sync workers it just means that the worker
+# process is still communicating and is not tied to the length
+# of time required to handle a single request.
+#
+# keepalive - The number of seconds to wait for the next request
+# on a Keep-Alive HTTP connection.
+#
+# A positive integer. Generally set in the 1-5 seconds range.
+#
+# reload - Restart workers when code changes.
+#
+# This setting is intended for development. It will cause
+# workers to be restarted whenever application code changes.
workers = 3  # worker processes
threads = 3  # threads per worker
worker_class = "uvicorn.workers.UvicornWorker"  # ASGI worker (FastAPI app)
worker_connections = 1000  # max simultaneous clients per eventlet/gevent worker
timeout = 60  # seconds of silence before the master kills and respawns a worker
keepalive = 2  # seconds to hold a Keep-Alive connection open
reload = False  # code auto-reload is a development-only feature
+
+#
+# spew - Install a trace function that spews every line of Python
+# that is executed when running the server. This is the
+# nuclear option.
+#
+# True or False
+#
+
+spew = False
+
+#
+# Server mechanics
+#
+# daemon - Detach the main Gunicorn process from the controlling
+# terminal with a standard fork/fork sequence.
+#
+# True or False
+#
+# raw_env - Pass environment variables to the execution environment.
+#
+# pidfile - The path to a pid file to write
+#
+# A path string or None to not write a pid file.
+#
+# user - Switch worker processes to run as this user.
+#
+# A valid user id (as an integer) or the name of a user that
+# can be retrieved with a call to pwd.getpwnam(value) or None
+# to not change the worker process user.
+#
+# group - Switch worker process to run as this group.
+#
+# A valid group id (as an integer) or the name of a user that
+# can be retrieved with a call to pwd.getgrnam(value) or None
+# to change the worker processes group.
+#
+# umask - A mask for file permissions written by Gunicorn. Note that
+# this affects unix socket permissions.
+#
+# A valid value for the os.umask(mode) call or a string
+# compatible with int(value, 0) (0 means Python guesses
+# the base, so values like "0", "0xFF", "0022" are valid
+# for decimal, hex, and octal representations)
+#
+# tmp_upload_dir - A directory to store temporary request data when
+# requests are read. This will most likely be disappearing soon.
+#
+# A path to a directory where the process owner can write. Or
+# None to signal that Python should choose one on its own.
+#
+
daemon = False  # stay in the foreground; gunicorn.sh daemonizes with -D instead
pidfile = "./gunicorn-mydisk.pid"  # NOTE(review): gunicorn.sh overrides this via --pid
umask = 0  # no extra restriction on permissions of files gunicorn writes
user = None  # keep the current user/group for worker processes
group = None
tmp_upload_dir = None  # let gunicorn choose where to buffer request bodies
+
+#
+# Logging
+#
+# logfile - The path to a log file to write to.
+#
+# A path string. "-" means log to stdout.
+#
+# loglevel - The granularity of log output
+#
+# A string of "debug", "info", "warning", "error", "critical"
+#
+
+
def get_log_path():
    """Return the absolute path of the ``log`` directory next to this file,
    creating it on first use.

    Bug fix: ``exist_ok=True`` replaces the check-then-create pattern, which
    was racy — another process (e.g. a second gunicorn instance) could create
    the directory between ``os.path.exists`` and ``os.makedirs``. The local
    ``import os`` was also redundant; ``os`` is imported at module level.
    """
    path_ = os.path.join(os.path.dirname(os.path.abspath(__file__)), "log")
    os.makedirs(path_, exist_ok=True)
    return path_
+
+
# Log destinations depend on the deployment mode: files under ./log in prod,
# stdout ("-") otherwise.
app_env = os.getenv("APP_ENV", "prod")
if app_env == "prod":
    loglevel = "info"
    log_name = "gunicorn-mydisk"
    log_dir_path = get_log_path()
    logfile = os.path.join(log_dir_path, log_name + ".log")
    errorlog = os.path.join(log_dir_path, log_name + "-error.log")
    # Access log shares the main log file.
    accesslog = logfile
else:
    loglevel = "debug"
    logfile = "-"
    errorlog = "-"
    accesslog = "-"

# Standard combined-style access log line (remote host, request, status, size,
# referer, user agent).
access_log_format = '%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s"'
+
+#
+# Process naming
+#
+# proc_name - A base to use with setproctitle to change the way
+# that Gunicorn processes are reported in the system process
+# table. This affects things like 'ps' and 'top'. If you're
+# going to be running more than one instance of Gunicorn you'll
+# probably want to set a name to tell them apart. This requires
+# that you install the setproctitle module.
+#
+# A string or None to choose a default of something like 'gunicorn'.
+#
+
+proc_name = "API-Mydisk-Service"
+
+
+#
+# Server hooks
+#
+# post_fork - Called just after a worker has been forked.
+#
+# A callable that takes a server and worker instance
+# as arguments.
+#
+# pre_fork - Called just prior to forking the worker subprocess.
+#
+# A callable that accepts the same arguments as after_fork
+#
+# pre_exec - Called just prior to forking off a secondary
+# master process during things like config reloading.
+#
+# A callable that takes a server instance as the sole argument.
+#
+
+
def post_fork(server, worker):
    """Server hook: log the new worker's pid right after it is forked."""
    server.log.info("Worker spawned (pid: %s)", worker.pid)
+
+
def pre_fork(server, worker):
    """Server hook: runs in the master just before a worker is forked (no-op)."""
    pass
+
+
def pre_exec(server):
    """Server hook: runs just before a new master process is exec'd (e.g. on
    config reload)."""
    server.log.info("Forked child, re-executing.")
+
+
def when_ready(server):
    """Server hook: the master finished booting and is about to spawn workers."""
    server.log.info("Server is ready. Spawning workers")
+
+
def worker_int(worker):
    """Server hook: a worker received INT/QUIT; log a stack dump of every
    thread in the worker for post-mortem debugging."""
    worker.log.info("worker received INT or QUIT signal")

    import sys
    import threading
    import traceback

    # Map thread idents to their names so the dump is readable.
    thread_names = {t.ident: t.name for t in threading.enumerate()}
    lines = []
    for ident, frame in sys._current_frames().items():
        lines.append("\n# Thread: %s(%d)" % (thread_names.get(ident, ""), ident))
        for fname, lineno, func, src in traceback.extract_stack(frame):
            lines.append('File: "%s", line %d, in %s' % (fname, lineno, func))
            if src:
                lines.append(" %s" % (src.strip(),))
    worker.log.debug("\n".join(lines))
+
+
def worker_abort(worker):
    """Server hook: the worker received SIGABRT (e.g. on a hard timeout)."""
    worker.log.info("worker received SIGABRT signal")
diff --git a/API_SERVICE/mydisk_service/gunicorn.sh b/API_SERVICE/mydisk_service/gunicorn.sh
new file mode 100644
index 00000000..b524da4d
--- /dev/null
+++ b/API_SERVICE/mydisk_service/gunicorn.sh
@@ -0,0 +1,62 @@
+#!/bin/bash
+
# Resolve the directory this script lives in, regardless of the caller's cwd.
root_path="$( cd "$( dirname "$0" )" && pwd -P )"
# Bug fix: the pid file was named gunicorn-login.pid (copied from the login
# service) and could collide with that service's pid file; use the mydisk
# name, matching the pidfile convention in gunicorn.conf.py.
pid_path="$root_path/gunicorn-mydisk.pid"

echo "$pid_path"
+
+# gunicorn 실행 명령어
# Start gunicorn daemonized and report the pid it wrote.
start_gunicorn() {
    gunicorn app.main:app --bind 0.0.0.0:22000 -c gunicorn.conf.py -D --pid "$pid_path"
    sleep 2
    # Bug fix: don't cat the pid file blindly — if startup failed there is no
    # pid file and the old code printed a cat error plus "PID:".
    if [ -f "$pid_path" ]; then
        pid=$(cat "$pid_path")
        echo "Gunicorn started. PID: $pid"
    else
        echo "Gunicorn failed to start (no pid file at $pid_path)."
        return 1
    fi
}
+
+# gunicorn 중지 명령어
# Stop a running gunicorn master and remove its pid file.
stop_gunicorn() {
    # Quote all expansions: pid_path/pid would word-split if the install path
    # ever contains spaces.
    if [ -f "$pid_path" ]; then
        pid=$(cat "$pid_path")
        kill "$pid"
        rm -f "$pid_path"
        echo "Gunicorn stopped. PID: $pid"
    else
        echo "Gunicorn is not running."
    fi
}
+
+# gunicorn 재실행 명령어
restart_gunicorn() {
    # Stop the current master (if any), then start a fresh one.
    stop_gunicorn
    start_gunicorn
}
+
+# gunicorn 실행 상태 확인
# Report whether gunicorn is actually running, detecting stale pid files.
status_gunicorn() {
    if [ -f "$pid_path" ]; then
        pid=$(cat "$pid_path")
        # Bug fix: a leftover pid file used to be reported as "running".
        # kill -0 only probes the process; it delivers no signal.
        if kill -0 "$pid" 2>/dev/null; then
            echo "Gunicorn is running. PID: $pid"
        else
            echo "Gunicorn is not running. (stale pid file: $pid_path)"
        fi
    else
        echo "Gunicorn is not running."
    fi
}
+
+# 스크립트 옵션 처리
# Dispatch on the first CLI argument: start | stop | restart | status.
case "$1" in
    start)
        start_gunicorn
        ;;
    stop)
        stop_gunicorn
        ;;
    restart)
        restart_gunicorn
        ;;
    status)
        status_gunicorn
        ;;
    *)
        echo "Usage: $0 {start|stop|restart|status}"
        exit 1
        ;;
esac
\ No newline at end of file
diff --git a/API_SERVICE/mydisk_service/requirements.txt b/API_SERVICE/mydisk_service/requirements.txt
new file mode 100644
index 00000000..c1da15f1
--- /dev/null
+++ b/API_SERVICE/mydisk_service/requirements.txt
@@ -0,0 +1,56 @@
+aiohttp==3.8.4
+aiosignal==1.3.1
+anyio==3.6.2
+APScheduler==3.10.1
+async-timeout==4.0.2
+attrs==23.1.0
+bcrypt==4.0.1
+certifi==2022.12.7
+cffi==1.15.1
+charset-normalizer==3.1.0
+click==8.1.3
+cryptography==40.0.2
+elastic-transport==8.4.0
+elasticsearch==8.7.0
+exceptiongroup==1.1.2
+fastapi==0.95.1
+frozenlist==1.3.3
+greenlet==2.0.2
+gunicorn==20.1.0
+h11==0.14.0
+httpcore==0.17.3
+httpx==0.24.1
+idna==3.4
+iniconfig==2.0.0
+multidict==6.0.4
+numpy==1.26.0
+packaging==23.1
+pandas==2.1.0
+passlib==1.7.4
+Pillow==10.0.1
+pluggy==1.2.0
+psycopg2-binary==2.9.6
+pycparser==2.21
+pycryptodome==3.18.0
+pydantic==1.10.7
+PyJWT==2.7.0
+pytest==7.4.0
+python-dotenv==1.0.0
+pytz==2023.3
+requests==2.31.0
+sniffio==1.3.0
+SQLAlchemy==2.0.9
+starlette==0.26.1
+tomli==2.0.1
+typing_extensions==4.5.0
+tzdata==2023.3
+tzlocal==5.0.1
+uvicorn==0.21.1
+yarl==1.8.2
+boto3==1.34.19
+botocore==1.34.19
+jmespath==1.0.1
+python-dateutil==2.8.2
+s3transfer==0.10.0
+six==1.16.0
+urllib3==1.26.18
\ No newline at end of file
diff --git a/README.md b/README.md
index 707c2fb1..5a70b69b 100644
--- a/README.md
+++ b/README.md
@@ -1,2 +1,4 @@
AP_API_Router
# AP_API_Router
+
docker build -t <image_name> -f <service>.Dockerfile .   (e.g. docker build -t api-common -f common.Dockerfile .)
\ No newline at end of file
diff --git a/batch.Dockerfile b/batch.Dockerfile
new file mode 100644
index 00000000..a43f8db2
--- /dev/null
+++ b/batch.Dockerfile
@@ -0,0 +1,17 @@
FROM python:3.9-alpine

# Toolchain and native libraries needed to build Python wheels
# (psycopg2, cryptography, numpy, ...) on Alpine.
RUN apk add --update alpine-sdk && \
    apk add --update --no-cache postgresql-client && \
    apk add --update --no-cache --virtual .tmp-build-deps \
    build-base gcc python3-dev postgresql-dev musl-dev libffi-dev openssl-dev cargo cmake openblas-dev

# Application source plus the shared libraries package.
COPY ./API_SERVICE/batch_service /app/source/batch_service
COPY ./common_libs /app/common_libs
WORKDIR /app/source/batch_service

RUN pip install --no-cache --upgrade pip && pip install -r requirements.txt

# Production mode; make both the source tree and common_libs importable.
ENV APP_ENV=prod
ENV PYTHONPATH=/app/source:/app/common_libs

# Consistency fix: this image starts gunicorn exactly like common.Dockerfile,
# so document its listening port too (gunicorn.conf.py binds 0.0.0.0:8000).
EXPOSE 8000

CMD [ "gunicorn", "app.main:app", "-c", "gunicorn.conf.py"]
diff --git a/common.Dockerfile b/common.Dockerfile
new file mode 100644
index 00000000..66da57de
--- /dev/null
+++ b/common.Dockerfile
@@ -0,0 +1,19 @@
FROM python:3.9-alpine

# Toolchain and native libraries needed to build Python wheels
# (psycopg2, cryptography, numpy, ...) on Alpine.
RUN apk add --update alpine-sdk && \
    apk add --update --no-cache postgresql-client && \
    apk add --update --no-cache --virtual .tmp-build-deps \
    build-base gcc python3-dev postgresql-dev musl-dev libffi-dev openssl-dev cargo cmake openblas-dev

# Application source plus the shared libraries package.
COPY ./API_SERVICE/common_service /app/source/common_service
COPY ./common_libs /app/common_libs
WORKDIR /app/source/common_service

RUN pip install --no-cache --upgrade pip && pip install -r requirements.txt

# Production mode; make both the source tree and common_libs importable.
ENV APP_ENV=prod
ENV PYTHONPATH=/app/source:/app/common_libs

# gunicorn.conf.py binds 0.0.0.0:8000.
EXPOSE 8000

CMD [ "gunicorn", "app.main:app", "-c", "gunicorn.conf.py"]
diff --git a/common_libs/libs/auth/__init__.py b/common_libs/libs/auth/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/common_libs/libs/auth/jwt.py b/common_libs/libs/auth/jwt.py
new file mode 100644
index 00000000..f6d61158
--- /dev/null
+++ b/common_libs/libs/auth/jwt.py
@@ -0,0 +1,13 @@
from datetime import datetime, timedelta

import jwt


def create_access_token(data: dict = None, expires_delta: int = 60, secret_key=None, algorithm=None, exclude_list=None):
    """Build a signed JWT from ``data``.

    Args:
        data: Claims to encode. A copy is taken, so the caller's dict is not
            mutated by the exclusions or by adding ``exp``.
        expires_delta: Token lifetime. NOTE(review): despite the name it is
            applied in *hours* (``timedelta(hours=...)``) — confirm intent.
        secret_key: Key used to sign the token.
        algorithm: Signing algorithm name passed to ``jwt.encode``.
        exclude_list: Claim names to drop before encoding. Defaults to no
            exclusions. (Bug fix: the previous ``exclude_list=[]`` was a
            shared mutable default.)

    Returns:
        The encoded JWT string.
    """
    to_encode = data.copy()
    for k in exclude_list or ():
        # Pop with a default so an absent claim is not a KeyError.
        to_encode.pop(k, None)
    if expires_delta:
        to_encode.update({"exp": datetime.utcnow() + timedelta(hours=expires_delta)})
    encoded_jwt = jwt.encode(to_encode, secret_key, algorithm=algorithm)
    return encoded_jwt
diff --git a/common_libs/libs/auth/keycloak.py b/common_libs/libs/auth/keycloak.py
new file mode 100644
index 00000000..1ed90016
--- /dev/null
+++ b/common_libs/libs/auth/keycloak.py
@@ -0,0 +1,351 @@
+from typing import Dict
+import logging
+import aiohttp
+import urllib.parse
+
+logger = logging.getLogger()
+
+
class KeycloakManager:
    """Thin async client for the Keycloak admin / OpenID-Connect REST API.

    Process-wide singleton: every ``KeycloakManager()`` call returns the same
    instance, so a base URL configured once (constructor or ``set_url``) is
    shared by all importers. All request methods return
    ``{"status_code": int, "data": parsed-json-or-bytes}``.
    """

    _instance = None

    def __new__(cls, *args, **kwargs):
        if not cls._instance:
            cls._instance = super().__new__(cls)
        return cls._instance

    def __init__(self, base_url: str = None) -> None:
        # __init__ re-runs on every KeycloakManager() call because of the
        # singleton __new__. Bug fix: only overwrite base_url when a new one
        # is given, so a bare KeycloakManager() (e.g. the module-level
        # `keycloak = KeycloakManager()`) no longer resets it to None.
        if base_url is not None or not hasattr(self, "base_url"):
            self.base_url = base_url

    def set_url(self, base_url):
        """Configure the Keycloak server base URL, e.g. ``http://host:8080``."""
        self.base_url = base_url

    async def _request_to_keycloak(self, api_url, method, headers, **kwargs):
        """Send a form-encoded request and return ``{"status_code", "data"}``.

        ``kwargs`` are urlencoded into the request body. ``data`` is the parsed
        JSON response when possible, otherwise the raw response bytes.
        """
        data = urllib.parse.urlencode(kwargs)
        # Security fix: the form body regularly carries passwords and client
        # secrets; never print it. Log only the target and the field names.
        logger.debug("keycloak %s %s fields=%s", method, api_url, list(kwargs))
        async with aiohttp.ClientSession() as session:
            async with session.request(url=api_url, method=method, headers=headers, data=data) as response:
                try:
                    ret = await response.json()
                except Exception:
                    ret = await response.read()
                return {"status_code": response.status, "data": ret}

    async def generate_admin_token(self, **kwargs) -> Dict:
        """Issue a token for the admin account (master realm, admin-cli client).

        Expected kwargs: ``username``, ``password``,
        ``grant_type`` ("password" or "refresh_token").
        """
        headers = {"Content-Type": "application/x-www-form-urlencoded"}
        return await self._request_to_keycloak(
            api_url=f"{self.base_url}/realms/master/protocol/openid-connect/token",
            client_id="admin-cli",
            method="POST",
            headers=headers,
            **kwargs,
        )

    async def generate_normal_token(self, realm, **kwargs) -> Dict:
        """Issue a token for a regular user of ``realm``.

        Expected kwargs: ``grant_type`` ("password" / "refresh_token" / token
        exchange), ``username``/``password`` or ``refresh_token``,
        ``client_id`` and ``client_secret``.
        """
        headers = {"Content-Type": "application/x-www-form-urlencoded"}
        return await self._request_to_keycloak(
            api_url=f"{self.base_url}/realms/{realm}/protocol/openid-connect/token",
            method="POST",
            headers=headers,
            **kwargs,
        )

    async def token_info(self, realm, **kwargs) -> Dict:
        """Introspect a token (expects ``token``, ``client_id``, ``client_secret``)."""
        headers = {"Content-Type": "application/x-www-form-urlencoded"}
        return await self._request_to_keycloak(
            api_url=f"{self.base_url}/realms/{realm}/protocol/openid-connect/token/introspect",
            method="POST",
            headers=headers,
            **kwargs,
        )

    async def create_user(self, token, realm, **kwargs):
        """Create a user in ``realm``; ``kwargs`` is the Keycloak user JSON."""
        headers = {"Content-Type": "application/json", "Authorization": "bearer " + token}
        async with aiohttp.ClientSession() as session:
            async with session.request(
                url=f"{self.base_url}/admin/realms/{realm}/users",
                method="POST",
                headers=headers,
                json=kwargs,
            ) as response:
                return {"status_code": response.status, "data": await response.read()}

    async def delete_user(self, token, realm, user_id):
        """Delete the user identified by Keycloak id ``user_id``."""
        headers = {"Authorization": "bearer " + token}
        return await self._request_to_keycloak(
            api_url=f"{self.base_url}/admin/realms/{realm}/users/{user_id}", method="DELETE", headers=headers
        )

    async def get_user_list(self, token, realm):
        """List the users of ``realm`` (admin token required)."""
        headers = {"Authorization": "bearer " + token}
        return await self._request_to_keycloak(
            api_url=f"{self.base_url}/admin/realms/{realm}/users", method="GET", headers=headers
        )

    async def user_info(self, token, realm):
        """OIDC userinfo for the owner of ``token``."""
        headers = {"Authorization": "bearer " + token}
        return await self._request_to_keycloak(
            api_url=f"{self.base_url}/realms/{realm}/protocol/openid-connect/userinfo", method="GET", headers=headers
        )

    async def user_info_detail(self, token, realm, user_id):
        """Full admin-view representation of a user (admin token required)."""
        headers = {"Authorization": "bearer " + token}
        return await self._request_to_keycloak(
            api_url=f"{self.base_url}/admin/realms/{realm}/users/{user_id}", method="GET", headers=headers
        )

    async def alter_user(self, token, realm, sub, **kwargs):
        """Update user ``sub``; ``kwargs`` is the Keycloak user JSON to apply."""
        headers = {"Content-Type": "application/json", "Authorization": "bearer " + token}

        async with aiohttp.ClientSession() as session:
            async with session.request(
                url=f"{self.base_url}/admin/realms/{realm}/users/{sub}",
                method="PUT",
                headers=headers,
                json=kwargs,
            ) as response:
                return {"status_code": response.status, "data": await response.read()}

    async def check_user_session(self, token, realm, user_id):
        """List the active sessions of ``user_id`` (admin token required)."""
        headers = {"Authorization": "bearer " + token}
        return await self._request_to_keycloak(
            api_url=f"{self.base_url}/admin/realms/{realm}/users/{user_id}/sessions", method="GET", headers=headers
        )

    async def logout(self, realm, **kwargs):
        """End the session behind ``refresh_token`` (plus client credentials)."""
        headers = {"Content-Type": "application/x-www-form-urlencoded"}
        return await self._request_to_keycloak(
            api_url=f"{self.base_url}/realms/{realm}/protocol/openid-connect/logout",
            method="POST",
            headers=headers,
            **kwargs,
        )

    async def refresh_token(self, realm, **kwargs):
        """Exchange a refresh token for a new token pair (same endpoint as
        generate_normal_token; expects grant_type="refresh_token")."""
        headers = {"Content-Type": "application/x-www-form-urlencoded"}
        return await self._request_to_keycloak(
            api_url=f"{self.base_url}/realms/{realm}/protocol/openid-connect/token",
            method="POST",
            headers=headers,
            **kwargs,
        )

    async def get_query(self, token, realm, query):
        """Search users with a raw admin-API query string (e.g. "email=x")."""
        headers = {"Authorization": "bearer " + token}
        return await self._request_to_keycloak(
            api_url=f"{self.base_url}/admin/realms/{realm}/users?{query}", method="GET", headers=headers
        )

    async def social_link(self, token, realm, sub, **kwargs):
        """Attach a federated (social) identity to user ``sub``.

        Expected kwargs: ``social_type`` (identity provider alias),
        ``social_id``, ``social_email``.
        """
        headers = {"Content-Type": "application/json", "Authorization": "bearer " + token}
        social_type = kwargs.get("social_type")

        params = {
            "identityProvider": social_type,
            "userId": kwargs.get("social_id"),
            "userName": kwargs.get("social_email")
        }

        async with aiohttp.ClientSession() as session:
            async with session.request(
                url=f"{self.base_url}/admin/realms/{realm}/users/{sub}/federated-identity/{social_type}",
                method="POST",
                headers=headers,
                json=params,
            ) as response:
                return {"status_code": response.status, "data": await response.read()}

    async def check_idp(self, token, realm, sub):
        """List the federated identities linked to user ``sub``."""
        headers = {"Content-Type": "application/json", "Authorization": "bearer " + token}
        return await self._request_to_keycloak(
            api_url=f"{self.base_url}/admin/realms/{realm}/users/{sub}/federated-identity", method="GET", headers=headers
        )

    async def check_client_id(self, token, realm):
        """List the clients registered in ``realm``."""
        headers = {"Content-Type": "application/json", "Authorization": "bearer " + token}
        return await self._request_to_keycloak(
            api_url=f"{self.base_url}/admin/realms/{realm}/clients", method="GET", headers=headers
        )

    async def check_client_role(self, token, realm, client_sub):
        """List the roles defined on client ``client_sub``."""
        headers = {"Content-Type": "application/json", "Authorization": "bearer " + token}
        return await self._request_to_keycloak(
            api_url=f"{self.base_url}/admin/realms/{realm}/clients/{client_sub}/roles", method="GET", headers=headers
        )

    async def set_client_role_mapping(self, token, realm, **kwargs):
        """Grant a client role to a user.

        Expected kwargs: ``user_sub``, ``client_sub``, ``role_sub``, ``role_name``.
        """
        headers = {"Content-Type": "application/json", "Authorization": "bearer " + token}
        user_sub = kwargs.get("user_sub")
        client_sub = kwargs.get("client_sub")
        params = [{
            "id": kwargs.get("role_sub"),
            "name": kwargs.get("role_name")
        }]

        async with aiohttp.ClientSession() as session:
            async with session.request(
                url=f"{self.base_url}/admin/realms/{realm}/users/{user_sub}/role-mappings/clients/{client_sub}",
                method="POST",
                headers=headers,
                json=params,
            ) as response:
                return {"status_code": response.status, "data": await response.read()}
+
if __name__ == "__main__":
    # Manual smoke test against a dev Keycloak instance: create a user, issue
    # and introspect tokens, update the user, refresh, token-exchange, logout,
    # delete, list.
    # NOTE(review): real-looking credentials, a client secret, and an internal
    # IP are hard-coded below and committed to version control — rotate them
    # and read them from environment variables instead.
    import asyncio

    realm = "kadap"
    client_id = "uyuni"
    client_secret = "8UDolCR5j1vHt4rsyHnwTDlYkuRmOUp8"

    normal_username = "swyang"
    normal_user_password = "zxcv1234!"
    normal_user_email = "swyang@mobigen.com"

    manager = KeycloakManager("http://192.168.101.44:8080")
    d = asyncio.run(manager.generate_admin_token(username="admin", password="zxcv1234!", grant_type="password"))
    print(f"admin_token :: {d}")
    admin_access_token = d.get("data").get("access_token")
    admin_refresh_token = d.get("data").get("refresh_token")
    data = {
        "username": normal_username,
        "firstName": "seokwoo",
        "lastName": "yang",
        "email": normal_user_email,
        "emailVerified": True,
        "enabled": True,
        "credentials": [{"value": normal_user_password}],
        "attributes": {"phoneNumber": "010-1234-5678", "gender": "male"},
    }
    r = asyncio.run(
        manager.create_user(
            realm=realm,
            token=admin_access_token,
            **data,
        )
    )
    print(f"create :: {r}")
    d = asyncio.run(
        manager.generate_normal_token(
            realm=realm,
            username=normal_username,
            password=normal_user_password,
            grant_type="password",
            client_id=client_id,
            client_secret=client_secret,
        )
    )
    print(f"normal token :: {d}")
    normal_access_token = d.get("data").get("access_token")
    normal_refresh_token = d.get("data").get("refresh_token")
    r = asyncio.run(
        manager.token_info(
            realm=realm,
            token=normal_access_token,
            client_id=client_id,
            client_secret=client_secret,
        )
    )
    print(f"token info :: {r}")
    r = asyncio.run(manager.user_info(realm=realm, token=normal_access_token))
    print(f"user info :: {r}")
    user_id = r.get("data").get("sub")
    r = asyncio.run(manager.user_info_detail(token=admin_access_token, realm=realm, user_id=user_id))
    print(f"detail :: {r}")
    data = {
        "firstName": "seokwoo",
        "lastName": "yang",
        "email": normal_user_email,
        "emailVerified": True,
        "credentials": [{"value": normal_user_password}],
        "attributes": {"phoneNumber": "010-9999-1234", "gender": "male"},
    }
    # Bug fix: alter_user's positional parameter is `sub`; passing user_id=
    # landed in **kwargs and raised TypeError (missing required 'sub').
    r = asyncio.run(manager.alter_user(token=admin_access_token, realm=realm, sub=user_id, **data))
    print(f"alter {r}")
    r = asyncio.run(manager.check_user_session(token=admin_access_token, realm=realm, user_id=user_id))
    print(f"check :: {r}")
    r = asyncio.run(
        manager.refresh_token(
            realm=realm,
            client_id=client_id,
            client_secret=client_secret,
            grant_type="refresh_token",
            refresh_token=normal_refresh_token,
        )
    )
    print(f"refresh :: {r}")
    # subject_issuer = kakao | naver | google
    r = asyncio.run(
        manager.generate_normal_token(
            realm=realm,
            client_id=client_id,
            client_secret=client_secret,
            grant_type="urn:ietf:params:oauth:grant-type:token-exchange",
            requested_token_type="urn:ietf:params:oauth:token-type:refresh_token",
            subject_issuer="google",
            subject_token=normal_access_token,
        )
    )
    print(f"social regist :: {r}")
    r = asyncio.run(
        manager.logout(
            realm=realm,
            grant_type="password",
            refresh_token=normal_refresh_token,
            client_id=client_id,
            client_secret=client_secret,
        )
    )
    print(f"logout :: {r}")
    r = asyncio.run(manager.delete_user(token=admin_access_token, realm=realm, user_id=user_id))
    print(f"delete :: {r}")
    r = asyncio.run(manager.get_user_list(token=admin_access_token, realm=realm))
    print(f"list :: {r}")

# Module-level singleton; consumers call keycloak.set_url(...) at startup.
keycloak = KeycloakManager()
diff --git a/common_libs/libs/database/connector.py b/common_libs/libs/database/connector.py
index 14ae2468..2bb9a42e 100644
--- a/common_libs/libs/database/connector.py
+++ b/common_libs/libs/database/connector.py
@@ -40,3 +40,11 @@ def get_column_info(self, table_nm, schema=None) -> List[Dict[str, str]]:
@abc.abstractmethod
def close(self):
...
+
    @abc.abstractmethod
    def commit(self):
        # Commit the executor's current transaction.
        ...

    @abc.abstractmethod
    def rollback(self):
        # Roll back the executor's current transaction.
        ...
diff --git a/common_libs/libs/database/dml_controller.py b/common_libs/libs/database/dml_controller.py
new file mode 100644
index 00000000..e2839d43
--- /dev/null
+++ b/common_libs/libs/database/dml_controller.py
@@ -0,0 +1,33 @@
+from typing import Union
+
+
class Base:
    """Declarative description of a table for building query/DML dicts.

    Subclasses set ``table_nm`` and ``key_column`` (one column name, or a list
    of names for a composite key).
    """

    table_nm: str
    key_column: Union[str, list]

    @classmethod
    def get_select_query(cls, key_value: str) -> dict:
        """Build a select-query dict filtering ``key_column = key_value``.

        NOTE(review): for a composite (list) ``key_column`` the whole list is
        placed into ``where_info["key"]`` — confirm downstream support.
        """
        return {
            "table_nm": cls.table_nm,
            "where_info": [
                {
                    "table_nm": cls.table_nm,
                    "key": cls.key_column,
                    "value": key_value,
                    "compare_op": "=",
                    "op": "",
                }
            ],
        }

    @classmethod
    def get_execute_query(cls, method: str, row: dict) -> dict:
        """Build an execute dict for ``method`` (insert/update/delete) on ``row``.

        INSERT needs no key filter, so ``key`` is omitted for it.
        """
        query = {
            "method": method,
            "table_nm": cls.table_nm,
            "data": row,
            # isinstance instead of `type(...) is list`: idiomatic and accepts
            # list subclasses.
            "key": cls.key_column if isinstance(cls.key_column, list) else [cls.key_column],
        }
        if method.upper() == "INSERT":
            query.pop("key")
        return query
diff --git a/common_libs/libs/database/orm.py b/common_libs/libs/database/orm.py
index af4a8a2f..223f8e66 100644
--- a/common_libs/libs/database/orm.py
+++ b/common_libs/libs/database/orm.py
@@ -1,13 +1,22 @@
+from datetime import datetime
+import json
+import logging
from typing import Dict, List, Union, Tuple, Optional
import sqlalchemy
from fastapi import FastAPI
-from sqlalchemy import Column, MetaData, and_, create_engine, not_, or_
-from sqlalchemy.orm import sessionmaker, declarative_base, Session, Query
+from sqlalchemy import Column, MetaData, and_, create_engine, not_, or_, Table
+from sqlalchemy.orm import sessionmaker, Session, Query
+from sqlalchemy.sql import column
from .connector import Connector, Executor
-db = declarative_base()
+
+logger = logging.getLogger()
+
+
+class TableNotFoundException(Exception):
+ ...
class SQLAlchemyConnector(Connector):
@@ -16,14 +25,17 @@ def __init__(self, base=None, app: FastAPI = None, **kwargs):
self._Base = base
self._session = None
self._metadata = None
+ self._schemas = []
if app is not None:
self.init_app(app=app, **kwargs)
def init_app(self, app: FastAPI, **kwargs):
- database_url = kwargs.get("DB_URL")
- pool_recycle = kwargs.get("DB_POOL_RECYCLE", 900)
+ db_info = kwargs.get("DB_INFO")
+ database_url = db_info.get("DB_URL")
+ pool_recycle = db_info.get("DB_POOL_RECYCLE", 900)
+ echo = db_info.get("DB_ECHO", False)
+ self._schemas = db_info.get("SCHEMA").split(",")
is_testing = kwargs.get("TESTING", False)
- echo = kwargs.get("DB_ECHO", False)
is_reload = kwargs.get("RELOAD", False)
self._engine = create_engine(
@@ -34,10 +46,7 @@ def init_app(self, app: FastAPI, **kwargs):
)
self._session = sessionmaker(autocommit=False, autoflush=False, bind=self._engine)
-
- self._metadata = MetaData()
- for schema in kwargs.get("DB_INFO").get("SCHEMA").split(","):
- self._metadata.reflect(bind=self._engine, views=True, schema=schema)
+ self.reset_metadata()
@app.on_event("startup")
def startup():
@@ -48,25 +57,41 @@ def shutdown():
self._session.close_all()
self._engine.dispose()
- def get_db(self) -> "SQLAlchemyConnector":
+ def get_db(self) -> Executor:
if self._session is None:
raise Exception("must be called 'init_db'")
- executor = OrmExecutor(self._session(), self._metadata)
+ executor = OrmExecutor(self._session(), self._metadata, self)
try:
yield executor
finally:
executor.close()
+ def get_db_manager(self):
+ return OrmExecutor(self._session(), self._metadata, self)
+
+ def reset_metadata(self):
+ self._metadata = MetaData()
+ for schema in self._schemas:
+ self._metadata.reflect(bind=self._engine, views=True, schema=schema)
+
class OrmExecutor(Executor):
- def __init__(self, session: Session, metadata: MetaData):
+ def __init__(self, session: Session, metadata: MetaData, conn: SQLAlchemyConnector):
self._session = session
self._metadata = metadata
+ self._conn = conn
self._cnt = 0
self._q: Optional[Query] = None
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self.close()
+
def query(self, **kwargs) -> "OrmExecutor":
base_table = self.get_table(kwargs["table_nm"])
+ logger.info(base_table)
key = kwargs.get("key")
# Join
if join_info := kwargs.get("join_info"):
@@ -74,7 +99,7 @@ def query(self, **kwargs) -> "OrmExecutor":
query = self._session.query(base_table, join_table).join(
join_table,
getattr(base_table.columns, key) == getattr(join_table.columns, join_info["key"]),
- )
+ )
else:
query = self._session.query(base_table)
@@ -129,45 +154,65 @@ def all(self) -> Tuple[List[dict], int]:
columns = self.get_query_columns()
data = [dict(zip(columns, data)) for data in self._q.all()]
- return data, self._cnt
+ return json.loads(json.dumps(data, default=str)), self._cnt
def first(self):
- columns = self.get_query_columns()
- return dict(zip(columns, self._q.first()))
+ try:
+ columns = self.get_query_columns()
+ dat = self._q.first()
+ return json.loads(json.dumps(dict(zip(columns, dat)), default=str)) if dat else None
+ except Exception as e:
+ raise e
def execute(self, **kwargs):
- """
- {"result":1,"errorMessage":""}
- """
- # try:
- # session.begin()
-
- # for row in params:
- # method = row.method.lower()
- # table = db.get_table(row.table_nm)
- # cond = [getattr(table.columns, k) == row.data[k] for k in row.key] if row.key else []
-
- # if method == "insert":
- # ins = table.insert().values(**row.data)
- # session.execute(ins)
- # elif method == "update":
- # stmt = table.update().where(*cond).values(**row.data)
- # session.execute(stmt)
- # elif method == "delete":
- # stmt = table.delete().where(*cond)
- # session.execute(stmt)
- # else:
- # raise NotImplementedError
-
- # session.commit()
- # except Exception as e:
- # session.rollback()
- # raise e
-
- def get_table(self, table_nm):
- for nm, t in self._metadata.tables.items():
- if table_nm in nm:
- return t
+ try:
+ method = kwargs["method"].lower()
+ table = self.get_table(kwargs["table_nm"])
+ data = self._data_parse(kwargs["data"])
+
+ cond = []
+ if keys := kwargs.get("key", []):
+ cond = [getattr(table.columns, k) == data[k] for k in keys]
+
+ if method == "insert":
+ stmt = table.insert().values(**data)
+ elif method == "update":
+ stmt = table.update().where(*cond).values(**data)
+ elif method == "delete":
+ stmt = table.delete().where(*cond)
+ else:
+ raise NotImplementedError
+
+ self._session.execute(stmt)
+
+ if auto_commit := kwargs.get("auto_commit", True):
+ self._session.commit()
+ except Exception as e:
+ self._session.rollback()
+ raise e
+
    def commit(self):
        # Flush the session's pending statements and commit the transaction.
        self._session.commit()

    def rollback(self):
        # Discard the session's pending statements.
        self._session.rollback()
+
+ def get_table(self, table_nm) -> Table:
+ def __search(table_nm):
+ table_nm = table_nm.lower()
+ for nm, t in self._metadata.tables.items():
+ if nm.lower().endswith(table_nm):
+ return t
+
+ for _ in range(2):
+ ret = __search(table_nm)
+ if ret is not None:
+ return ret
+ self._conn.reset_metadata()
+
+ err_msg = f"table not found :: {table_nm}"
+ logger.error(f"{err_msg}, {self._metadata.tables.keys()}, {self._conn._schemas}")
+ raise TableNotFoundException(err_msg)
def get_query_columns(self):
return [desc["name"] for desc in self._q.column_descriptions] if self._q else None
@@ -200,8 +245,23 @@ def _parse_operand(self, key: Column, value: Union[str, int], compare: str):
return
def get_column_info(self, table_nm, schema=None) -> List[Dict[str, str]]:
- ...
+ raise Exception("NOT IMPLE...")
def close(self):
self._session.close()
+ def _data_parse(self, data):
+ ret = {}
+ for k, v in data.items():
+ if str(v).startswith("`"):
+ if "+" in v:
+ v = v[1:].split("+")
+ ret[k] = column(v[0].strip()) + int(v[1])
+ elif "-" in v:
+ v = v[1:].split("-")
+ data[k] = column(v[0].strip()) - int(v[1])
+ elif str(v).upper().startswith("NOW"):
+ ret[k] = datetime.now()
+ else:
+ ret[k] = v
+ return ret
diff --git a/common_libs/libs/database/tibero.py b/common_libs/libs/database/tibero.py
index 9f357ef6..179a59d8 100644
--- a/common_libs/libs/database/tibero.py
+++ b/common_libs/libs/database/tibero.py
@@ -59,10 +59,13 @@ def query(self, **kwargs) -> "QueryExecutor":
order_clause = ""
if order_info := kwargs.get("order_info"):
- t = order_info["table_nm"]
- k = order_info["key"]
- o = order_info["order"]
- order_clause += f"order by {t}.{k} {str(o).upper()} "
+ order_clause = "order by "
+ if type(order_info) is dict : order_info = [order_info]
+ for info in order_info :
+ t = info["table_nm"]
+ k = info["key"]
+ o = info["order"]
+ order_clause += f" {t}.{k} {str(o).upper()} "
query = f"select * from {table_nm} " + join_clause + where_clause + order_clause
self._cntq = f"select count(*) from {table_nm} " + join_clause + where_clause + order_clause
@@ -222,7 +225,7 @@ async def shutdown():
if self.conn:
self.conn.close()
- def get_db(self) -> "TiberoConnector":
+ def get_db(self) -> Executor:
executor = QueryExecutor(self.conn)
try:
yield executor
diff --git a/common_libs/libs/disk/__init__.py b/common_libs/libs/disk/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/common_libs/libs/disk/mydisk.py b/common_libs/libs/disk/mydisk.py
new file mode 100644
index 00000000..089c73f9
--- /dev/null
+++ b/common_libs/libs/disk/mydisk.py
@@ -0,0 +1,78 @@
+from typing import Dict
+import logging
+import aiohttp
+import urllib.parse
+
+logger = logging.getLogger()
+
+
class MydiskManager:
    """Singleton HTTP client for the "mydisk" file-storage service.

    Configure once via the first constructor call or set_url(); later
    ``MydiskManager()`` calls return the same shared instance.
    """

    _instance = None

    def __new__(cls, *args, **kwargs):
        if not cls._instance:
            cls._instance = super().__new__(cls)
        return cls._instance

    def __init__(self, base_url: str = None) -> None:
        # BUG FIX: __init__ runs on *every* MydiskManager() call even though
        # __new__ returns the shared instance, so a later no-arg call used to
        # wipe a previously configured base_url back to None. Only overwrite
        # on first initialization or when a URL is explicitly given.
        if base_url is not None or not hasattr(self, "base_url"):
            self.base_url = base_url

    def set_url(self, base_url):
        """Set (or replace) the service base URL."""
        self.base_url = base_url

    async def _request_to_mydisk(self, api_url, method, headers, **kwargs):
        """Send a form-encoded request to the mydisk service.

        :param api_url: absolute URL to call
        :param method: HTTP method string (e.g. "POST")
        :param headers: headers dict to send
        :return: {"status_code": int, "data": parsed JSON or raw bytes}
        """
        data = urllib.parse.urlencode(kwargs)
        # Was a stray debug print(); use the module logger instead.
        logger.debug("mydisk request body: %s", data)
        async with aiohttp.ClientSession() as session:
            async with session.request(url=api_url, method=method, headers=headers, data=data) as response:
                try:
                    ret = await response.json()
                except Exception:
                    # Non-JSON responses (e.g. file payloads) fall back to raw bytes.
                    ret = await response.read()
                return {"status_code": response.status, "data": ret}

    async def generate_admin_token(self, **kwargs) -> Dict:
        """Issue an OAuth2 token for the admin account.

        Expected kwargs: username, password, scope ("upload profile admin
        list"), client_id, client_secret. grant_type is forced to "password".

        :return: {"status_code": ..., "data": ...} from the token endpoint
        """
        headers = {"Content-Type": "application/x-www-form-urlencoded"}
        return await self._request_to_mydisk(
            api_url=f"{self.base_url}/oauth2/token/",
            method="POST",
            headers=headers,
            grant_type="password",
            **kwargs,
        )

    async def file_download(self, token, path):
        """Download the file at *path* using bearer *token*."""
        headers = {"Authorization": "bearer " + token}
        return await self._request_to_mydisk(
            api_url=f"{self.base_url}/api.php/files/download/",
            method="POST",
            headers=headers,
            path=path,
        )


mydisk = MydiskManager()
diff --git a/common_libs/libs/els/ELKSearch/README.md b/common_libs/libs/els/ELKSearch/README.md
new file mode 100644
index 00000000..3832ab35
--- /dev/null
+++ b/common_libs/libs/els/ELKSearch/README.md
@@ -0,0 +1,13 @@
+# ELKSearch
+
+- mapping
+ - index 생성에 사용되는 mapping 파일을 저장하는 폴더
+- Utils
+ - base.py: document와 index에서 공통적으로 사용되는 util 모듈
+- test
+ - ELKSearch 모듈을 테스트하기 위한 코드를 저장해 두는 폴더
+- ELKSearch
+ - config: els 연결에 사용할 설정을 저장해두는 코드
+ - model: 검색이나 els 설정에 사용될 데이터 모델을 작성해둔 파일
+ - index: elasticsearch의 index를 관리하기 위한 모듈
+ - document: index의 데이터를 관리하기 위한 모듈
\ No newline at end of file
diff --git a/common_libs/libs/els/ELKSearch/Utils/__init__.py b/common_libs/libs/els/ELKSearch/Utils/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/common_libs/libs/els/ELKSearch/Utils/base.py b/common_libs/libs/els/ELKSearch/Utils/base.py
new file mode 100644
index 00000000..c24ebe5f
--- /dev/null
+++ b/common_libs/libs/els/ELKSearch/Utils/base.py
@@ -0,0 +1,18 @@
+import re
+import string
+from elasticsearch import Elasticsearch
+
+
def set_els(host, port):
    """Build an Elasticsearch client for http://host:port with a 600 s timeout."""
    endpoint = f"http://{host}:{port}"
    return Elasticsearch(endpoint, timeout=600)
+
+
def make_format(key, inner_key, value) -> dict:
    """Wrap *value* in a two-level dict: ``{key: {inner_key: value}}``."""
    return {key: {inner_key: value}}
+
+
def symbol_filter(keywords: str):
    """Replace every ASCII punctuation character in *keywords* with a space.

    NOTE(review): ``" ".join(keywords)`` space-separates individual
    *characters* when a plain str is passed (e.g. "ab" -> "a b"); this looks
    intentional for tokenizing, but confirm callers expect it.

    :param keywords: string (or iterable of strings) to sanitize
    :return: sanitized, stripped string
    """
    words = " ".join(keywords).strip()
    # BUG FIX: the character class was built from raw string.punctuation,
    # which matched correctly only by luck (",-." forms a valid range and
    # "]" happens to follow "\"). re.escape makes the class robust.
    words = re.sub(f"[{re.escape(string.punctuation)}]", " ", words)
    return words
diff --git a/common_libs/libs/els/ELKSearch/Utils/document_utils.py b/common_libs/libs/els/ELKSearch/Utils/document_utils.py
new file mode 100644
index 00000000..49a53677
--- /dev/null
+++ b/common_libs/libs/els/ELKSearch/Utils/document_utils.py
@@ -0,0 +1,11 @@
+
+
def search_filter(find_data):
    """Extract the ``_source`` payloads from a raw Elasticsearch search response."""
    hits = find_data["hits"]["hits"]
    return [hit["_source"] for hit in hits]
+
+
def set_source(source):
    """Normalize a ``_source`` field list: None becomes [] (return all fields)."""
    return [] if source is None else source
diff --git a/common_libs/libs/els/ELKSearch/__init__.py b/common_libs/libs/els/ELKSearch/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/common_libs/libs/els/ELKSearch/document.py b/common_libs/libs/els/ELKSearch/document.py
new file mode 100644
index 00000000..d40a9339
--- /dev/null
+++ b/common_libs/libs/els/ELKSearch/document.py
@@ -0,0 +1,98 @@
+from libs.els.ELKSearch.Utils.base import make_format
+from libs.els.ELKSearch.Utils.document_utils import set_source
+
+
class DocumentManager:
    """Thin wrapper around an Elasticsearch client, scoped to one index.

    Holds a reusable query ``body`` plus pagination state and exposes
    CRUD/search helpers that delegate to the underlying client.
    """

    def __init__(self, connect, index: str, size: int = 0, from_: int = 0):
        """
        :param connect: Elasticsearch client instance
        :param index: index to operate on (similar to a DB table name)
        :param size: number of hits to return per search
        :param from_: page number; the hit offset is ``size * from_``
        """
        self.connect = connect
        self.index = index
        self.body = dict()
        self.size = size
        self.page = size * from_

    def set_body(self, body: dict):
        """Store the query body used by subsequent calls."""
        self.body = body

    def set_sort(self, sort_list: list):
        """Attach a sort specification to the stored body."""
        self.body["sort"] = sort_list

    def insert(self, doc_id: str):
        """Index the stored body as a new document under *doc_id*."""
        return self.connect.index(index=self.index, body=self.body, id=doc_id)

    def update(self, doc_id):
        """Overwrite the document identified by *doc_id* with the stored body."""
        return self.connect.index(index=self.index, id=doc_id, body=self.body)

    def find(self, source: list = None) -> dict:
        """Run the stored query against the index.

        :param source: fields to return; None/[] means all fields
        :return: raw search response
        """
        wanted = source if source is not None else []
        return self.connect.search(
            index=self.index,
            body=self.body,
            from_=self.page,
            size=self.size,
            _source=wanted,
        )

    def delete(self, pk_name: str, pk_value):
        """Delete every document whose *pk_name* field matches *pk_value*.

        Multiple documents are removed when the pair is not unique.
        """
        removal = {"query": {"term": {pk_name: pk_value}}}
        self.connect.delete_by_query(index=self.index, body=removal)

    def set_pagination(self, size: int = 0, from_: int = 0) -> None:
        """Reset page size and page number; call before find()."""
        self.size = size
        self.page = size * from_

    def prefix(self, body: dict, source: list = None) -> dict:
        """Run a prefix search with *body*.

        :param source: fields to return; None/[] means all fields
        :return: raw search response
        """
        wanted = source if source is not None else []
        wrapped = {"query": {"prefix": body}}
        return self.connect.search(
            index=self.index,
            body=wrapped,
            size=self.size,
            _source=wanted,
        )

    def count(self, body: dict) -> int:
        """Return the number of documents matching *body*."""
        return self.connect.count(index=self.index, body=body)["count"]
\ No newline at end of file
diff --git a/API_SERVICE/meta_service/ELKSearch/index.py b/common_libs/libs/els/ELKSearch/index.py
similarity index 100%
rename from API_SERVICE/meta_service/ELKSearch/index.py
rename to common_libs/libs/els/ELKSearch/index.py
diff --git a/common_libs/libs/els/ELKSearch/model.py b/common_libs/libs/els/ELKSearch/model.py
new file mode 100644
index 00000000..41393f8d
--- /dev/null
+++ b/common_libs/libs/els/ELKSearch/model.py
@@ -0,0 +1,36 @@
+from typing import List, Union
+
+from pydantic import BaseModel, Field
+
+
class ElkIndexConfig(BaseModel):
    """Elasticsearch connection target: host, port and index name."""

    host: str
    port: str  # kept as str — interpolated into a URL, not used numerically
    index: str
+
+
class CoreOption(BaseModel):
    """One search/filter clause: field(s), keyword(s) and an operator."""

    field: Union[list, str]     # a single field name or a list of them
    keywords: Union[list, str]  # term(s) to match
    operator: str               # presumably "and"/"or" — confirm with the query builder
+
+
class SortOption(BaseModel):
    """Sort directive for search results."""

    field: str
    order: str  # presumably "asc"/"desc" — confirm with the query builder
+
+
class RangeOption(BaseModel):
    """Range clause on a single field."""

    field: str
    compare_dict: dict  # presumably comparison ops like {"gte": ..., "lte": ...} — confirm with callers
+
+
class InputModel(BaseModel):
    """Top-level search request payload.

    ``from_`` is populated from the JSON key "from" (a Python keyword)
    via the Field alias.
    """

    index: str = ""
    from_: int = Field(1, alias="from")  # presumably a 1-based page number — confirm with callers
    size: int = 10
    resultField: list = []  # mutable defaults are safe here: pydantic copies per instance
    sortOption: List[SortOption] = []
    searchOption: List[CoreOption] = []
    filterOption: List[CoreOption] = []
    rangeOption: List[RangeOption] = []
diff --git a/common_libs/libs/els/__init__.py b/common_libs/libs/els/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/common_libs/libs/els/manager.py b/common_libs/libs/els/manager.py
new file mode 100644
index 00000000..82ceb4d5
--- /dev/null
+++ b/common_libs/libs/els/manager.py
@@ -0,0 +1,2 @@
class ELSContextManager:
    """Placeholder for an Elasticsearch connection context manager.

    NOTE(review): not implemented yet — no behavior to document.
    """

    ...
diff --git a/common_libs/libs/exceptions.py b/common_libs/libs/exceptions.py
new file mode 100644
index 00000000..1962a80a
--- /dev/null
+++ b/common_libs/libs/exceptions.py
@@ -0,0 +1,2 @@
class TokenDoesNotExist(Exception):
    """Presumably raised when an expected auth token is absent — confirm with callers."""

    ...
diff --git a/common_libs/libs/middlewares/__init__.py b/common_libs/libs/middlewares/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/common_libs/libs/middlewares/keycloak_middleware.py b/common_libs/libs/middlewares/keycloak_middleware.py
new file mode 100644
index 00000000..d0479d51
--- /dev/null
+++ b/common_libs/libs/middlewares/keycloak_middleware.py
@@ -0,0 +1,66 @@
+from ast import literal_eval
+import logging
+from fastapi import HTTPException, Request
+from datetime import datetime
+
+from libs.auth.keycloak import KeycloakManager
+
+REFRESH_SEC = 60 * 50
+
+
def get_token_from_cookie(cookies):
    """Return the first (name, value) cookie pair whose name contains "token".

    Returns None when no such cookie exists.
    """
    return next(((name, val) for name, val in cookies.items() if "token" in name), None)
+
+
def refresh_token_from_cookie_wrapper(keycloak: KeycloakManager, **kwargs):
    """Build an HTTP middleware that keeps a Keycloak token cookie fresh.

    Reads from kwargs: logger, realm, client_id, client_secret.
    Returns an async ``middleware(request, call_next)`` coroutine.
    """
    logger: logging.Logger = kwargs.get("logger")

    async def refresh_with_cookie(request: Request, call_next):
        # Find the first cookie whose name contains "token"; without one,
        # pass the request straight through untouched.
        dat = get_token_from_cookie(request.cookies)
        if not dat:
            logger.debug(f"token none :: {request.cookies}")
            return await call_next(request)

        logger.info(dat)
        cookie_name = dat[0]
        try:
            # The cookie value is a Python-literal dict (written back below
            # via set_cookie with a dict value).
            token = literal_eval(dat[1])
            if token.get("status_code") >= 400:
                raise HTTPException(status_code=token.get("status_code"))
        except Exception as e:
            # Any parse/validation failure lands here — including a missing
            # status_code, where None >= 400 raises TypeError. The broken
            # cookie is dropped and the request proceeds without a token.
            # NOTE(review): domain is hard-coded — confirm it matches deployment.
            response = await call_next(request)
            response.delete_cookie(cookie_name, domain="bigdata-car.kr")
            return response

        # NOTE(review): strftime("%s") (epoch seconds) is a non-portable
        # glibc extension; datetime.now().timestamp() would be portable.
        now = datetime.now().strftime("%s")
        # Default to "needs refresh" in case the token age can't be computed.
        diffTime = REFRESH_SEC + 1
        try:
            createTime = token.get("create_time")
            diffTime = float(now) - float(createTime)
        except Exception:
            # create_time absent or unparseable -> keep the refresh default.
            pass

        logger.info(f"createTime :: {createTime}")
        logger.info(f"diffTime :: {diffTime}")

        if diffTime > REFRESH_SEC:
            # Token older than REFRESH_SEC (50 min): exchange the refresh
            # token for a new one and stamp the new creation time.
            logger.info("Refresh Token!!")
            res = await keycloak.refresh_token(
                realm=kwargs.get("realm"),
                client_id=kwargs.get("client_id"),
                client_secret=kwargs.get("client_secret"),
                grant_type="refresh_token",
                refresh_token=token["data"]["refresh_token"],
            )
            res["create_time"] = datetime.now().strftime("%s")
        else:
            logger.info("Token Maintain!!")
            res = token

        # Re-issue the (possibly refreshed) token dict as the cookie value.
        api_response = await call_next(request)
        api_response.set_cookie(key=cookie_name, value=res)
        return api_response

    return refresh_with_cookie
diff --git a/login.Dockerfile b/login.Dockerfile
new file mode 100644
index 00000000..2c1eb3ee
--- /dev/null
+++ b/login.Dockerfile
@@ -0,0 +1,17 @@
FROM python:3.9-alpine

# Build toolchain + postgres client; .tmp-build-deps groups the build-only packages.
# NOTE(review): .tmp-build-deps is never removed (`apk del .tmp-build-deps`),
# so build deps remain in the final image — confirm whether that is intended.
RUN apk add --update alpine-sdk && \
    apk add --update --no-cache postgresql-client && \
    apk add --update --no-cache --virtual .tmp-build-deps \
    build-base gcc python3-dev postgresql-dev musl-dev libffi-dev openssl-dev cargo cmake openblas-dev

COPY ./API_SERVICE/login_service /app/source/login_service
COPY ./common_libs /app/common_libs
WORKDIR /app/source/login_service

# BUG FIX: pip has no --no-cache option (that is apk's flag); the correct
# pip spelling is --no-cache-dir, without which this RUN step errors out.
RUN pip install --no-cache-dir --upgrade pip && pip install --no-cache-dir -r requirements.txt

ENV APP_ENV=prod
ENV PYTHONPATH=/app/source:/app/common_libs

CMD [ "gunicorn", "app.main:app", "-c", "gunicorn.conf.py"]
diff --git a/meta.Dockerfile b/meta.Dockerfile
new file mode 100644
index 00000000..a2cc5246
--- /dev/null
+++ b/meta.Dockerfile
@@ -0,0 +1,17 @@
FROM python:3.9-alpine

# Build toolchain + postgres client; .tmp-build-deps groups the build-only packages.
# NOTE(review): .tmp-build-deps is never removed (`apk del .tmp-build-deps`),
# so build deps remain in the final image — confirm whether that is intended.
RUN apk add --update alpine-sdk && \
    apk add --update --no-cache postgresql-client && \
    apk add --update --no-cache --virtual .tmp-build-deps \
    build-base gcc python3-dev postgresql-dev musl-dev libffi-dev openssl-dev cargo cmake openblas-dev

COPY ./API_SERVICE/meta_service /app/source/meta_service
COPY ./common_libs /app/common_libs
WORKDIR /app/source/meta_service

# BUG FIX: pip has no --no-cache option (that is apk's flag); the correct
# pip spelling is --no-cache-dir, without which this RUN step errors out.
RUN pip install --no-cache-dir --upgrade pip && pip install --no-cache-dir -r requirements.txt

ENV APP_ENV=prod
ENV PYTHONPATH=/app/source:/app/common_libs

CMD [ "gunicorn", "app.main:app", "-c", "gunicorn.conf.py"]
diff --git a/mydisk.Dockerfile b/mydisk.Dockerfile
new file mode 100644
index 00000000..824757cd
--- /dev/null
+++ b/mydisk.Dockerfile
@@ -0,0 +1,17 @@
FROM python:3.9-alpine

# Build toolchain + postgres client; .tmp-build-deps groups the build-only packages.
# NOTE(review): .tmp-build-deps is never removed (`apk del .tmp-build-deps`),
# so build deps remain in the final image — confirm whether that is intended.
RUN apk add --update alpine-sdk && \
    apk add --update --no-cache postgresql-client && \
    apk add --update --no-cache --virtual .tmp-build-deps \
    build-base gcc python3-dev postgresql-dev musl-dev libffi-dev openssl-dev cargo cmake openblas-dev

COPY ./API_SERVICE/mydisk_service /app/source/mydisk_service
COPY ./common_libs /app/common_libs
WORKDIR /app/source/mydisk_service

# BUG FIX: pip has no --no-cache option (that is apk's flag); the correct
# pip spelling is --no-cache-dir, without which this RUN step errors out.
RUN pip install --no-cache-dir --upgrade pip && pip install --no-cache-dir -r requirements.txt

ENV APP_ENV=prod
ENV PYTHONPATH=/app/source:/app/common_libs

CMD [ "gunicorn", "app.main:app", "-c", "gunicorn.conf.py"]
diff --git a/router.Dockerfile b/router.Dockerfile
new file mode 100644
index 00000000..6c910202
--- /dev/null
+++ b/router.Dockerfile
@@ -0,0 +1,20 @@
FROM python:3.9-alpine

# Build toolchain + postgres client; .tmp-build-deps groups the build-only packages.
# NOTE(review): .tmp-build-deps is never removed (`apk del .tmp-build-deps`),
# so build deps remain in the final image — confirm whether that is intended.
RUN apk add --update alpine-sdk && \
    apk add --update --no-cache postgresql-client && \
    apk add --update --no-cache --virtual .tmp-build-deps \
    build-base gcc python3-dev postgresql-dev musl-dev libffi-dev openssl-dev cargo cmake openblas-dev

COPY ./API_ROUTER/router /app/source/router
COPY ./common_libs /app/common_libs
WORKDIR /app/source/router

# BUG FIX: pip has no --no-cache option (that is apk's flag); the correct
# pip spelling is --no-cache-dir, without which this RUN step errors out.
RUN pip install --no-cache-dir --upgrade pip && pip install --no-cache-dir -r requirements.txt

ENV APP_ENV=prod
ENV PYTHONPATH=/app/source:/app/common_libs

EXPOSE 8000

CMD [ "gunicorn", "app.main:app", "-c", "gunicorn.conf.py"]
+