diff --git a/.gitignore b/.gitignore
new file mode 100644
index 00000000..f794df8a
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,12 @@
+/venv/
+__pycache__/
+.DS_Store
+.idea
+*.log
+build
+mobigen_*.egg*
+dist
+.python-version
+.env
+.vscode
+.coverage
\ No newline at end of file
diff --git a/API-ROUTER/ApiRoute/ApiRoute.py b/API-ROUTER/ApiRoute/ApiRoute.py
new file mode 100644
index 00000000..6aa09558
--- /dev/null
+++ b/API-ROUTER/ApiRoute/ApiRoute.py
@@ -0,0 +1,137 @@
+from fastapi.logger import logger
+from typing import Dict, List
+import importlib.util
+from fastapi import APIRouter
+from ApiRoute.ApiRouteConfig import config
+
+# from RouterUtils.CommonUtil import connect_db, save_file_for_reload, get_exception_info, delete_headers, kt_lamp # 함수 내부에 import로 수정
+from RouterUtils.RouteUtil import (
+ bypass_msg,
+ call_remote_func,
+ get_api_info,
+ make_route_response,
+)
+from pydantic import BaseModel
+from starlette.requests import Request
+from urllib import parse
+import logging
+import uuid
+
+
+trace_logger = logging.getLogger("trace")
+
+
+class ApiServerInfo(BaseModel):
+ nm: str
+ ip_adr: str
+ domn_nm: str
+
+
+class ApiParam(BaseModel):
+ api_nm: str
+ nm: str
+ data_type: str
+ deflt_val: str
+
+
+class ApiInfo(BaseModel):
+ api_nm: str
+ ctgry: str
+ route_url: str
+ url: str
+ meth: str
+ cmd: str
+ mode: str
+ params: List[ApiParam]
+
+
+class ApiRoute:
+ def __init__(self) -> None:
+ self.router = APIRouter()
+ self.set_route()
+
+ def set_route(self) -> None:
+ from RouterUtils.CommonUtil import connect_db
+
+ self.router.add_api_route(
+ "/api/reload", self.reload_api, methods=["GET"], tags=["API Info Reload"]
+ )
+
+ db = connect_db()
+ config.api_info, _ = db.select("SELECT * FROM api_item_bas;")
+ config.api_params, _ = db.select("SELECT * FROM api_item_param_dtl;")
+
+ config.api_server_info, _ = db.select("SELECT * FROM api_item_server_dtl")
+
+ for api in config.api_info:
+ method = str(api["mthd"]).split(",")
+ self.router.add_api_route(
+ api["route_url"],
+ self.route_api,
+ methods=method,
+ tags=[f'Route Category ({api["srvr_nm"]})'],
+ )
+
+ for api_name, conf_api_info in config.api_config.items():
+ module_name = f'RouterApiList.{conf_api_info["sub_dir"]}.{api_name}'
+ spec = importlib.util.find_spec(module_name)
+ module = importlib.util.module_from_spec(spec)
+ spec.loader.exec_module(module)
+ self.router.add_api_route(
+ f'{conf_api_info["url"]}',
+ module.api,
+ methods=[conf_api_info["method"]],
+ tags=[f'service [ {conf_api_info["sub_dir"]} ]'],
+ )
+
+ def reload_api(self):
+ from RouterUtils.CommonUtil import save_file_for_reload
+
+ logger.info("Reload API Info")
+ save_file_for_reload()
+ result = {"result": 1, "errorMessage": ""}
+ return result
+
+ async def route_api(self, request: Request) -> Dict:
+ # 함수 내부에 import로 수정
+ from RouterUtils.CommonUtil import get_exception_info, delete_headers, kt_lamp
+
+ route_url = request.url.path
+ method = request.method
+ access_token = ""
+ body = None
+ headers = delete_headers(
+ dict(request.headers), ["content-length", "user-agent"]
+ )
+
+ transaction_id = f'{config.lamp_info["service_code"]}_{uuid.uuid4()}'
+ headers["transactionId"] = transaction_id
+
+ try:
+ api_info, api_params = get_api_info(route_url)
+ # lamp 1
+ kt_lamp("IN_REQ", transaction_id, api_info["api_nm"])
+
+ if method == "POST":
+ body = await request.json()
+
+ params_query = parse.unquote(str(request.query_params))
+
+ logger.info(
+ f"\n- api_info : {api_info}\n- api_params : {api_params} \
+ \n- req body : {body}, params_query : {params_query}"
+ )
+ if api_info["mode"] == "MESSAGE PASSING":
+ result, access_token = await bypass_msg(
+ api_info, params_query, body, headers
+ )
+ else:
+ result = await call_remote_func(api_info, api_params, body)
+        except Exception:
+            except_name = get_exception_info()
+            result = {"result": 0, "errorMessage": except_name}
+        # guard: api_info is None when route_url is not registered
+        api_nm = api_info["api_nm"] if api_info else ""
+        # lamp 6
+        kt_lamp("IN_RES", transaction_id, api_nm)
+        return make_route_response(result, api_nm, access_token)
diff --git a/API-ROUTER/ApiRoute/ApiRouteConfig.py b/API-ROUTER/ApiRoute/ApiRouteConfig.py
new file mode 100644
index 00000000..ebb4748f
--- /dev/null
+++ b/API-ROUTER/ApiRoute/ApiRouteConfig.py
@@ -0,0 +1,27 @@
+from typing import Dict, List
+from psycopg2 import pool
+
+
+class ApiRouteConfig:
+ root_path: str
+
+ db_type: str
+ db_info: Dict
+
+ remote_info: Dict
+
+ server_host: str
+ server_port: int
+
+ api_config: Dict
+
+ api_server_info: List[Dict]
+ api_info: List[Dict]
+ api_params: List[Dict]
+
+ secret_info: Dict
+ lamp_info: Dict
+ conn_pool: pool.SimpleConnectionPool
+
+
+config = ApiRouteConfig
diff --git a/API-ROUTER/ApiRoute/__init__.py b/API-ROUTER/ApiRoute/__init__.py
new file mode 100644
index 00000000..c1dfef31
--- /dev/null
+++ b/API-ROUTER/ApiRoute/__init__.py
@@ -0,0 +1,2 @@
+from .ApiRouteConfig import *
+from .ApiRoute import *
diff --git a/API-ROUTER/RouterApiList/__init__.py b/API-ROUTER/RouterApiList/__init__.py
new file mode 100644
index 00000000..e3a9f8af
--- /dev/null
+++ b/API-ROUTER/RouterApiList/__init__.py
@@ -0,0 +1 @@
+from .service import *
diff --git a/API-ROUTER/RouterApiList/service/TEST_1.py b/API-ROUTER/RouterApiList/service/TEST_1.py
new file mode 100644
index 00000000..b90ecf46
--- /dev/null
+++ b/API-ROUTER/RouterApiList/service/TEST_1.py
@@ -0,0 +1,7 @@
+from typing import Dict
+from ApiRoute.ApiRouteConfig import config
+
+
+def api(api_name: str) -> Dict:
+
+ return {"API_NAME": "TEST_1"}
diff --git a/API-ROUTER/RouterApiList/service/TEST_2.py b/API-ROUTER/RouterApiList/service/TEST_2.py
new file mode 100644
index 00000000..b90ecf46
--- /dev/null
+++ b/API-ROUTER/RouterApiList/service/TEST_2.py
@@ -0,0 +1,7 @@
+from typing import Dict
+from ApiRoute.ApiRouteConfig import config
+
+
+def api(api_name: str) -> Dict:
+
+ return {"API_NAME": "TEST_1"}
diff --git a/API-ROUTER/RouterApiList/service/__init__.py b/API-ROUTER/RouterApiList/service/__init__.py
new file mode 100644
index 00000000..785d16dc
--- /dev/null
+++ b/API-ROUTER/RouterApiList/service/__init__.py
@@ -0,0 +1,2 @@
+from .TEST_1 import *
+from .TEST_2 import *
diff --git a/API-ROUTER/RouterConnectManager/PostgresManager.py b/API-ROUTER/RouterConnectManager/PostgresManager.py
new file mode 100644
index 00000000..514a6b25
--- /dev/null
+++ b/API-ROUTER/RouterConnectManager/PostgresManager.py
@@ -0,0 +1,57 @@
+import psycopg2
+from typing import List, Dict, Tuple, Any
+from fastapi.logger import logger
+from ApiRoute.ApiRouteConfig import config
+
+
+class PostgresManager:
+ def __init__(self) -> None:
+ self.conn = self.connect()
+ self.cursor = self.conn.cursor()
+
+ def connect(self):
+ conn = config.conn_pool.getconn()
+
+ logger.info("PostgresManager Connect.")
+ return conn
+
+ def execute(self, sql: str) -> None:
+ try:
+ self.cursor.execute(sql)
+ self.conn.commit()
+ except (Exception, psycopg2.DatabaseError):
+ self.conn.rollback()
+            raise  # re-raise the original error (keeps message and traceback)
+
+ def multiple_excute(self, sql_list: list) -> None:
+ try:
+ for index, sql in enumerate(sql_list):
+ logger.info(f"PostgresManager Multiple Execute. ({index}. {sql})")
+ self.cursor.execute(sql)
+ self.conn.commit()
+ except (Exception, psycopg2.DatabaseError):
+ self.conn.rollback()
+            raise  # re-raise the original error (keeps message and traceback)
+
+ def select(
+ self, sql: str, count: int = None
+ ) -> Tuple[List[Dict[Any, Any]], List[Any]]:
+ self.execute(sql)
+ column_names = [desc[0] for desc in self.cursor.description]
+ if count is None:
+ rows = self.cursor.fetchall()
+ else:
+ rows = self.cursor.fetchmany(count)
+ logger.info(f"PostgresManager Select Execute. ({sql})")
+
+ result = []
+ for row in rows:
+ result.append(dict(zip(column_names, row)))
+ return result, column_names
+
+ def commit(self):
+ self.conn.commit()
+
+ def __del__(self) -> None:
+ self.cursor.close()
+ config.conn_pool.putconn(self.conn)
diff --git a/API-ROUTER/RouterConnectManager/__init__.py b/API-ROUTER/RouterConnectManager/__init__.py
new file mode 100644
index 00000000..b5c0391e
--- /dev/null
+++ b/API-ROUTER/RouterConnectManager/__init__.py
@@ -0,0 +1 @@
+from .PostgresManager import *
diff --git a/API-ROUTER/RouterUtils/CommonUtil.py b/API-ROUTER/RouterUtils/CommonUtil.py
new file mode 100644
index 00000000..c20f9036
--- /dev/null
+++ b/API-ROUTER/RouterUtils/CommonUtil.py
@@ -0,0 +1,165 @@
+import os
+import configparser
+import argparse
+import starlette.datastructures
+from fastapi.logger import logger
+from typing import Any, Dict, List
+from ApiRoute.ApiRouteConfig import config
+from RouterConnectManager import PostgresManager
+from psycopg2 import pool
+import sys
+import traceback
+import logging
+from datetime import datetime
+
+lamp = logging.getLogger("trace")
+
+
+def convert_data(data) -> str:
+ return f"'{str(data)}'"
+
+
+def set_log_path():
+ parser = configparser.ConfigParser()
+ parser.read(f"{config.root_path}/conf/logging.conf", encoding="utf-8")
+
+ parser.set(
+ "handler_rotatingFileHandler",
+ "args",
+ f"('{config.root_path}/log/API-Router.log', 'a', 20000000, 10)",
+ )
+
+ with open(f"{config.root_path}/conf/logging.conf", "w") as f:
+ parser.write(f)
+
+
+def get_config(config_name: str):
+ ano_cfg = {}
+
+ conf = configparser.ConfigParser()
+ config_path = config.root_path + f"/conf/{config_name}"
+ conf.read(config_path, encoding="utf-8")
+ for section in conf.sections():
+ ano_cfg[section] = {}
+ for option in conf.options(section):
+ ano_cfg[section][option] = conf.get(section, option)
+
+ return ano_cfg
+
+
+def parser_params() -> Any:
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--host", type=str, default="127.0.0.1")
+ parser.add_argument("--port", type=int, default=18000)
+ parser.add_argument("--db_type", default="test")
+
+ return parser.parse_args()
+
+
+def prepare_config(root_path) -> None:
+ args = parser_params()
+ config.root_path = root_path
+ api_router_cfg = get_config("config.ini")
+ config.api_config = get_config("api_config.ini")
+ config.db_type = f"{args.db_type}_db"
+ config.server_host = args.host
+ config.server_port = args.port
+ config.db_info = api_router_cfg[config.db_type]
+ config.conn_pool = make_connection_pool(config.db_info)
+ config.remote_info = api_router_cfg["remote"]
+ config.secret_info = api_router_cfg["secret_info"]
+ config.lamp_info = api_router_cfg["lamp_info"]
+
+
+def make_connection_pool(db_info):
+ conn_pool = pool.SimpleConnectionPool(
+ 1,
+ 20,
+ user=db_info["user"],
+ password=db_info["password"],
+ host=db_info["host"],
+ port=db_info["port"],
+ database=db_info["database"],
+ options=f'-c search_path={db_info["schema"]}',
+ connect_timeout=10,
+ )
+ return conn_pool
+
+
+def connect_db():
+ db = PostgresManager()
+ return db
+
+
+def save_file_for_reload():
+ with open(f"{config.root_path}/server.py", "a") as fd:
+ fd.write(" ")
+
+
+def make_res_msg(result, err_msg, data=None, column_names=None):
+ header_list = []
+    for column_name in column_names or []:
+ header = {"column_name": column_name}
+ header_list.append(header)
+
+ if data is None or column_names is None:
+ res_msg = {"result": result, "errorMessage": err_msg}
+ else:
+ res_msg = {
+ "result": result,
+ "errorMessage": err_msg,
+ "body": data,
+ "header": header_list,
+ }
+ return res_msg
+
+
+def get_exception_info():
+ ex_type, ex_value, ex_traceback = sys.exc_info()
+ trace_back = traceback.extract_tb(ex_traceback)
+ trace_log = "\n".join([str(trace) for trace in trace_back])
+ logger.error(
+ f"\n- Exception Type : {ex_type}\n- Exception Message : {str(ex_value).strip()}\n- Exception Log : \n{trace_log}"
+ )
+ return ex_type.__name__
+
+
+def delete_headers(headers: Dict, delete_header: List) -> Dict:
+ for delete in delete_header:
+ if headers.get(delete):
+ del headers[delete]
+ return headers
+
+
+def kt_lamp(
+ log_type: str,
+ transaction_id: str,
+ operation: str,
+ res_type: str = "I",
+ res_code: str = "",
+ res_desc: str = "",
+):
+ if operation in config.lamp_info["api_list"].split(","):
+ lamp_form = {}
+ now = datetime.now()
+ lamp_form["timestamp"] = now.strftime("%Y-%m-%d %H:%M:%S.%f")[:-3]
+ lamp_form["service"] = config.lamp_info["service_code"]
+ lamp_form["operation"] = f'{config.lamp_info["prefix"]}_{operation}'
+ lamp_form["transactionId"] = transaction_id
+ lamp_form["logType"] = log_type
+
+ lamp_form["host"] = {}
+ lamp_form["host"]["name"] = config.lamp_info["host_name"]
+ lamp_form["host"]["ip"] = config.lamp_info["host_ip"]
+
+ if log_type == "OUT_REQ":
+ lamp_form["destination"] = {}
+ lamp_form["destination"]["name"] = config.lamp_info["dest_name"]
+ lamp_form["destination"]["ip"] = config.lamp_info["dest_ip"]
+ elif log_type == "OUT_RES" or log_type == "IN_RES":
+ lamp_form["response"] = {}
+ lamp_form["response"]["type"] = res_type
+ lamp_form["response"]["code"] = res_code
+ lamp_form["response"]["desc"] = res_desc
+
+ lamp.info(lamp_form)
diff --git a/API-ROUTER/RouterUtils/RouteUtil.py b/API-ROUTER/RouterUtils/RouteUtil.py
new file mode 100644
index 00000000..bfa3f73a
--- /dev/null
+++ b/API-ROUTER/RouterUtils/RouteUtil.py
@@ -0,0 +1,131 @@
+import asyncssh
+import aiohttp
+from fastapi.logger import logger
+from fastapi.responses import JSONResponse
+from urllib.parse import ParseResult
+from ApiRoute.ApiRouteConfig import config
+
+# from RouterUtils.CommonUtil import get_exception_info, kt_lamp
+from typing import Dict
+
+
+def make_url(server_name: str, url_path: str):
+ for server_info in config.api_server_info:
+ if server_info["srvr_nm"] == server_name:
+ if len(server_info["ip_adr"]) != 0:
+ netloc = server_info["ip_adr"]
+ else:
+ netloc = server_info["domn_nm"]
+ url = ParseResult(
+ scheme="http",
+ netloc=netloc,
+ path=url_path,
+ params="",
+ query="",
+ fragment="",
+ )
+ logger.info(f"Message Passing Url : {url.geturl()}")
+ return url.geturl()
+ return None
+
+
+def make_route_response(result, api_name, access_token):
+ response = JSONResponse(content=result)
+ add_cookie_api_list = config.secret_info["add_cookie_api"].split(",")
+ if api_name in add_cookie_api_list:
+ response.set_cookie(
+ key=config.secret_info["cookie_name"],
+ value=access_token,
+ max_age=3600,
+ secure=False,
+ httponly=True,
+ )
+ return response
+
+
+def get_api_info(route_url):
+ api_info = None
+ api_params = []
+ for api in config.api_info:
+ if api["route_url"] == route_url:
+ api_info = api
+ for params in config.api_params:
+ if params["api_nm"] == api["api_nm"]:
+ api_params.append(params)
+ break
+ return api_info, api_params
+
+
+async def bypass_msg(api_info, params_query, body, headers):
+ from RouterUtils.CommonUtil import kt_lamp
+
+ # lamp 2
+ kt_lamp("OUT_REQ", headers["transactionId"], api_info["api_nm"])
+
+ method = api_info["mthd"]
+ url = make_url(api_info["srvr_nm"], api_info["url"])
+ if url is None:
+ return {"result": 0, "errorMessage": "The server info does not exist."}
+
+ async with aiohttp.ClientSession() as session:
+ if method == "GET":
+ params = {}
+ if len(params_query) != 0:
+ for param in params_query.split("&"):
+ parser_param = param.split("=")
+ params[parser_param[0]] = parser_param[1]
+
+ async with session.get(url, params=params, headers=headers) as response:
+ access_token = response.cookies.get(config.secret_info["cookie_name"])
+ result = await response.json()
+ elif method == "POST":
+ async with session.post(url, json=body, headers=headers) as response:
+ access_token = response.cookies.get(config.secret_info["cookie_name"])
+ result = await response.json()
+ else:
+ logger.error(f"Method Not Allowed. {method}")
+ result = {"result": 0, "errorMessage": "Method Not Allowed."}
+
+ # lamp 5
+ kt_lamp("OUT_RES", headers["transactionId"], api_info["api_nm"])
+ return result, access_token
+
+
+async def run_cmd(cmd: str):
+ async with asyncssh.connect(
+ host=config.remote_info["host"],
+ port=int(config.remote_info["port"]),
+ username=config.remote_info["id"],
+ password=config.remote_info["password"],
+ known_hosts=None,
+ ) as conn:
+ logger.info(f"Run Cmd : {cmd}")
+ result = await conn.run(cmd, check=True)
+ logger.info(f"Command Result : {result.stdout}")
+ return result.stdout
+
+
+async def call_remote_func(api_info, api_params, input_params) -> Dict:
+ from RouterUtils.CommonUtil import get_exception_info
+
+ command_input = ""
+ for api_param in api_params:
+ try:
+ data = input_params[api_param["nm"]]
+ if not data:
+ data = api_param["deflt_val"]
+ command_input += f' --{api_param["nm"]} {data}'
+ except KeyError:
+ logger.error(f'parameter set default value. [{api_param["nm"]}]')
+ command_input += f' --{api_param["nm"]} {api_param["deflt_val"]}'
+
+ cmd = f'{api_info["cmd"]} {command_input}'
+
+ try:
+ result = await run_cmd(cmd)
+ except Exception:
+ except_name = get_exception_info()
+ res_msg = {"result": 0, "errorMessage": except_name}
+ else:
+ res_msg = {"result": 1, "errorMessage": "", "data": eval(result)}
+ return res_msg
diff --git a/API-ROUTER/RouterUtils/__init__.py b/API-ROUTER/RouterUtils/__init__.py
new file mode 100644
index 00000000..22c1052e
--- /dev/null
+++ b/API-ROUTER/RouterUtils/__init__.py
@@ -0,0 +1,2 @@
+from .CommonUtil import *
+from .RouteUtil import *
diff --git a/API-ROUTER/build.sh b/API-ROUTER/build.sh
new file mode 100755
index 00000000..7e4fd316
--- /dev/null
+++ b/API-ROUTER/build.sh
@@ -0,0 +1,8 @@
+rm -rf build dist mobigen_router.egg-info
+pip uninstall mobigen_router -y
+
+python setup.py bdist_wheel
+
+pip install ./dist/mobigen_router-0.5-py3-none-any.whl
+
+
diff --git a/API-ROUTER/conf/api_config.ini b/API-ROUTER/conf/api_config.ini
new file mode 100644
index 00000000..8e1c9a95
--- /dev/null
+++ b/API-ROUTER/conf/api_config.ini
@@ -0,0 +1,9 @@
+[TEST_1]
+method = GET
+url = /api
+sub_dir = service
+
+[TEST_2]
+method = GET
+url = /api
+sub_dir = service
\ No newline at end of file
diff --git a/API-ROUTER/conf/config.ini b/API-ROUTER/conf/config.ini
new file mode 100644
index 00000000..bc6d2b6e
--- /dev/null
+++ b/API-ROUTER/conf/config.ini
@@ -0,0 +1,34 @@
+[remote]
+host = 192.168.101.44
+port = 10022
+id = deep
+password = !deep@win#human$5
+
+[test_db]
+host = 192.168.100.126
+port = 25432
+user = dpsi
+password = hello.sitemng12#$
+database = ktportal
+schema = sitemng
+
+[commercial_db]
+host = 10.220.184.63
+port = 5432
+user = dpsi
+password = 22DOCean.@!~
+database = ktportal
+schema = sitemng
+
+[secret_info]
+cookie_name = user-docean-access-token
+add_cookie_api = userLogin,commonLogout,commonToken
+
+[lamp_info]
+service_code = PG016701
+prefix = router
+host_name = portal
+host_ip = 192.168.100.126
+dest_name = portal
+dest_ip = 192.168.100.126
+api_list = userLogin
\ No newline at end of file
diff --git a/API-ROUTER/conf/logging.conf b/API-ROUTER/conf/logging.conf
new file mode 100644
index 00000000..0fe35ec3
--- /dev/null
+++ b/API-ROUTER/conf/logging.conf
@@ -0,0 +1,43 @@
+[loggers]
+keys = root,trace
+
+[logger_root]
+level = INFO
+handlers = console,rotatingFileHandler
+
+[logger_trace]
+level = INFO
+qualname = trace
+handlers = traceHandler
+propagate = 0
+
+[formatters]
+keys = default,trace
+
+[formatter_default]
+format = %(asctime)s %(levelname)s [%(filename)s:%(lineno)d] - %(message)s
+
+[formatter_trace]
+format = %(message)s
+
+[handlers]
+keys = console,rotatingFileHandler,traceHandler
+
+[handler_console]
+class = StreamHandler
+args = (sys.stdout,)
+formatter = default
+level = INFO
+
+[handler_rotatingFileHandler]
+class = handlers.RotatingFileHandler
+formatter = default
+args = ('/Users/cbc/DEV/Mobigen/API_DataPortal/KT/AP_API_Router/API-ROUTER/log/API-Router.log', 'a', 20000000, 10)
+level = INFO
+
+[handler_traceHandler]
+class = handlers.RotatingFileHandler
+formatter = trace
+args = ('Router-Trace.log', 'a', 20000000, 10)
+level = INFO
+
diff --git a/API-ROUTER/safe_start.sh b/API-ROUTER/safe_start.sh
new file mode 100755
index 00000000..2f40f0fe
--- /dev/null
+++ b/API-ROUTER/safe_start.sh
@@ -0,0 +1,62 @@
+app_name=API-Router
+router_host=$1
+router_port=$2
+router_db=$3
+
+input() {
+ if [[ $router_host == "" ]];then
+ router_host=192.168.100.126
+ fi
+ if [[ $router_port == "" ]];then
+ router_port=9010
+ fi
+ if [[ $router_db == "" ]];then
+ router_db=test
+ fi
+}
+
+router_stop() {
+ app=$( ps -ef | grep python | grep server.py | grep ${router_host} | grep ${router_port} | awk '{print $2}' )
+ if [[ $app != "" ]];then
+ exit_app="kill -9 ${app}"
+ echo "Stop Command ( router ) : "${exit_app}
+ $exit_app
+ else
+ echo "Not Found application. ( router )"
+ fi
+}
+
+uvicorn_stop() {
+ uvicorn=$( netstat -nlp | grep ${router_host}':'${router_port} | awk '{print $7}' | tr "/" "\n" )
+ if [[ $uvicorn != "" ]];then
+ for i in $uvicorn
+ do
+ if [[ ${i} == *python* ]];then
+ continue
+ fi
+ exit_uvicorn="kill -9 ${i}"
+ echo "Stop Command ( uvicorn ) : "${exit_uvicorn}
+ $exit_uvicorn
+ done
+ else
+ echo "Not Found application. ( uvicorn )"
+ fi
+}
+
+router_start() {
+ source_path="$( cd "$( dirname "$0" )" && pwd -P )"
+ router_exec="nohup python3.8 ${source_path}/server.py --host ${router_host} --port ${router_port} --db_type ${router_db} 1> /dev/null 2>&1 &"
+ echo "Start Command : ${router_exec}"
+ nohup python3.8 ${source_path}/server.py --host ${router_host} --port ${router_port} --db_type ${router_db} 1> /dev/null 2>&1 &
+}
+
+echo "########## Safe Start (${app_name}) ##########"
+echo "========== STOP ${app_name} =========="
+input
+
+router_stop
+sleep 2
+uvicorn_stop
+
+echo "========== START ${app_name} =========="
+router_start
diff --git a/API-ROUTER/server.py b/API-ROUTER/server.py
new file mode 100644
index 00000000..d3bf1133
--- /dev/null
+++ b/API-ROUTER/server.py
@@ -0,0 +1,30 @@
+from fastapi import FastAPI
+import uvicorn
+from pathlib import Path
+from ApiRoute.ApiRouteConfig import config
+from RouterUtils.CommonUtil import prepare_config, set_log_path
+from ApiRoute import ApiRoute
+import os
+
+root_path = str(Path(os.path.dirname(os.path.abspath(__file__))))
+prepare_config(root_path)
+api_router = ApiRoute()
+app = FastAPI()
+app.include_router(api_router.router)
+
+if __name__ == "__main__":
+ log_dir = f"{config.root_path}/log"
+ if os.path.isdir(log_dir):
+ print(f"Directory Exists")
+ else:
+ print(f"Make log dir : {log_dir}")
+ os.makedirs(log_dir)
+
+ set_log_path()
+ uvicorn.run(
+ "server:app",
+ host=config.server_host,
+ port=config.server_port,
+ reload=True,
+ log_config=f"{config.root_path}/conf/logging.conf",
+ )
diff --git a/API-ROUTER/setup.py b/API-ROUTER/setup.py
new file mode 100644
index 00000000..413981ed
--- /dev/null
+++ b/API-ROUTER/setup.py
@@ -0,0 +1,10 @@
+from setuptools import setup, find_packages
+
+setup(
+ name="mobigen_router",
+ version="0.5",
+ author="mobigen",
+ author_email="cbccbs@mobigen.co.kr",
+ python_requires=">=3.6",
+ packages=find_packages(exclude=["docs", "tests*", "__pycache__/"]),
+)
diff --git a/API-ROUTER/start.sh b/API-ROUTER/start.sh
new file mode 100644
index 00000000..e05ac0c9
--- /dev/null
+++ b/API-ROUTER/start.sh
@@ -0,0 +1,29 @@
+app_name=API-Router
+router_host=$1
+router_port=$2
+router_db=$3
+
+input() {
+ if [[ $router_host == "" ]];then
+ router_host=192.168.100.126
+ fi
+ if [[ $router_port == "" ]];then
+ router_port=9010
+ fi
+ if [[ $router_db == "" ]];then
+ router_db=test
+ fi
+}
+
+router_start() {
+ source_path="$( cd "$( dirname "$0" )" && pwd -P )"
+ router_exec="nohup python3.8 ${source_path}/server.py --host ${router_host} --port ${router_port} --db_type ${router_db} 1> /dev/null 2>&1 &"
+ echo "Start Command : ${router_exec}"
+    nohup python3.8 ${source_path}/server.py --host ${router_host} --port ${router_port} --db_type ${router_db} 1> /dev/null 2>&1 &
+}
+
+echo "########## Start Application (${app_name}) ##########"
+echo "========== START ${app_name} =========="
+input
+
+router_start
diff --git a/API-ROUTER/stop.sh b/API-ROUTER/stop.sh
new file mode 100644
index 00000000..3f9cb035
--- /dev/null
+++ b/API-ROUTER/stop.sh
@@ -0,0 +1,47 @@
+app_name=API-Router
+router_host=$1
+router_port=$2
+
+input() {
+ if [[ $router_host == "" ]];then
+ router_host=192.168.100.126
+ fi
+ if [[ $router_port == "" ]];then
+ router_port=9010
+ fi
+}
+
+router_stop() {
+ app=$( ps -ef | grep python | grep server.py | grep ${router_host} | grep ${router_port} | awk '{print $2}' )
+ if [[ $app != "" ]];then
+ exit_app="kill -9 ${app}"
+ echo "Stop Command ( router ) : "${exit_app}
+ $exit_app
+ else
+ echo "Not Found application. ( router )"
+ fi
+}
+
+uvicorn_stop() {
+ uvicorn=$( netstat -nlp | grep ${router_host}':'${router_port} | awk '{print $7}' | tr "/" "\n" )
+ if [[ $uvicorn != "" ]];then
+ for i in $uvicorn
+ do
+ if [[ ${i} == *python* ]];then
+ continue
+ fi
+ exit_uvicorn="kill -9 ${i}"
+ echo "Stop Command ( uvicorn ) : "${exit_uvicorn}
+ $exit_uvicorn
+ done
+ else
+ echo "Not Found application. ( uvicorn )"
+ fi
+}
+
+echo "########## Stop Application (${app_name}) ##########"
+echo "========== STOP ${app_name} =========="
+input
+router_stop
+sleep 2
+uvicorn_stop
diff --git a/API-SERVICE/ApiService/ApiService.py b/API-SERVICE/ApiService/ApiService.py
new file mode 100644
index 00000000..a90816dd
--- /dev/null
+++ b/API-SERVICE/ApiService/ApiService.py
@@ -0,0 +1,25 @@
+import importlib.util
+from fastapi.logger import logger
+from fastapi import APIRouter
+from ApiService.ApiServiceConfig import config
+
+
+class ApiService:
+ def __init__(self) -> None:
+ self.router = APIRouter()
+ self.set_route()
+
+ def set_route(self) -> None:
+ for api_name, api_info in config.api_config.items():
+ if config.category in api_info["sub_dir"]:
+ module_name = f'ServiceApiList.{api_info["sub_dir"]}.{api_name}'
+ logger.info(module_name)
+ spec = importlib.util.find_spec(module_name)
+ module = importlib.util.module_from_spec(spec)
+ spec.loader.exec_module(module)
+ self.router.add_api_route(
+ f'{api_info["url"]}',
+ module.api,
+ methods=[api_info["method"]],
+ tags=[f'service [ {api_info["sub_dir"]} ]'],
+ )
diff --git a/API-SERVICE/ApiService/ApiServiceConfig.py b/API-SERVICE/ApiService/ApiServiceConfig.py
new file mode 100644
index 00000000..9bfcdd56
--- /dev/null
+++ b/API-SERVICE/ApiService/ApiServiceConfig.py
@@ -0,0 +1,27 @@
+from typing import Dict
+from psycopg2 import pool
+
+
+class ApiServiceConfig:
+ root_path: str
+
+ category: str
+
+ db_type: str
+ db_info: Dict
+
+ remote_info: Dict
+
+ server_host: str
+ server_port: int
+
+ api_config: Dict
+
+ secret_info: Dict
+ user_info: Dict
+ lamp_info: Dict
+ ldap_info: Dict
+ conn_pool: pool.SimpleConnectionPool
+
+
+config = ApiServiceConfig
diff --git a/API-SERVICE/ApiService/__init__.py b/API-SERVICE/ApiService/__init__.py
new file mode 100644
index 00000000..9213060d
--- /dev/null
+++ b/API-SERVICE/ApiService/__init__.py
@@ -0,0 +1,2 @@
+from .ApiService import *
+from .ApiServiceConfig import *
diff --git a/API-SERVICE/ELKSearch/.gitignore b/API-SERVICE/ELKSearch/.gitignore
new file mode 100644
index 00000000..fd1b0921
--- /dev/null
+++ b/API-SERVICE/ELKSearch/.gitignore
@@ -0,0 +1,2 @@
+/.idea/
+*.iml
\ No newline at end of file
diff --git a/API-SERVICE/ELKSearch/Manager/__init__.py b/API-SERVICE/ELKSearch/Manager/__init__.py
new file mode 100644
index 00000000..b6e690fd
--- /dev/null
+++ b/API-SERVICE/ELKSearch/Manager/__init__.py
@@ -0,0 +1 @@
+from . import *
diff --git a/API-SERVICE/ELKSearch/Manager/manager.py b/API-SERVICE/ELKSearch/Manager/manager.py
new file mode 100644
index 00000000..4cc8ba7e
--- /dev/null
+++ b/API-SERVICE/ELKSearch/Manager/manager.py
@@ -0,0 +1,79 @@
+from typing import Dict, Any, Union
+from elasticsearch import Elasticsearch
+from ELKSearch.Utils.elasticsearch_utils import make_query
+
+
+class ElasticSearchManager:
+ def __init__(
+ self,
+ host: str = "10.217.59.133",
+ port: str = "9200",
+ page: int = 1,
+ size: int = 10,
+ index: str = "biz_meta",
+ ):
+ """
+ set elasticsearch connect && DSL query setting function
+ :param host: elasticsearch host ip addr, default = localhost
+ :param port: elasticsearch ip port number, default = 9200
+ :param index:
+ :param page: page, size * page , elasticsearch default value = 0
+ :param size: 아이템 개수 , elasticsearch default value = 10
+ """
+ self.host = host
+ self.port = port
+ self.size = size
+ self.index = index
+ self.cur_from = size * page
+ self.conn = self.connect()
+ self.body = self.set_default_option()
+
+ def connect(self) -> Elasticsearch:
+ es = Elasticsearch(f"http://{self.host}:{self.port}")
+ return es
+
+ def set_default_option(self) -> Dict[Any, Any]:
+ # 유지 보수를 위해 model 적용 안 함
+ self.body = {
+ "sort": [],
+ }
+ return self.body
+
+ def set_sort(self, sort: list) -> None:
+ self.body["sort"] = sort
+
+ def set_pagination(self, size: int, from_: int) -> None:
+ self.size = size
+ self.cur_from = size * from_
+
+ def search(self, source=...):
+ return self.conn.search(
+ index=self.index,
+ body=self.body,
+ from_=self.cur_from,
+ size=self.size,
+ _source=source,
+ )
+
+ def insert(self, body: dict, doc_id: str) -> None:
+ return self.conn.index(index=self.index, body=body, id=doc_id)
+
+ def update(self, body: dict, doc_id: str):
+ return self.conn.update(index=self.index, id=doc_id, body=body)
+
+ def delete(self, field: str, data: Union[str, list]):
+ """
+ 단수 : { query: { term: _id}}
+ 복수 : { query : { term : []}}
+ :param field: data type str, elasticsearch index _source name
+ :param data: data type str or list
+ """
+ delete_data = {field: data}
+ delete_command = make_query("query", "term", delete_data)
+ return self.conn.delete_by_query(index=self.index, body=delete_command)
+
+ def prefix(self, keyword: dict, source=...):
+ prefix_query = make_query("query", "prefix", keyword)
+ return self.conn.search(
+ index=self.index, body=prefix_query, size=self.size, _source=source
+ )
diff --git a/API-SERVICE/ELKSearch/README.md b/API-SERVICE/ELKSearch/README.md
new file mode 100644
index 00000000..15643c0c
--- /dev/null
+++ b/API-SERVICE/ELKSearch/README.md
@@ -0,0 +1,2 @@
+# pyes
+python elasticsearch controller
diff --git a/API-SERVICE/ELKSearch/Utils/__init__.py b/API-SERVICE/ELKSearch/Utils/__init__.py
new file mode 100644
index 00000000..b6e690fd
--- /dev/null
+++ b/API-SERVICE/ELKSearch/Utils/__init__.py
@@ -0,0 +1 @@
+from . import *
diff --git a/API-SERVICE/ELKSearch/Utils/database_utils.py b/API-SERVICE/ELKSearch/Utils/database_utils.py
new file mode 100644
index 00000000..e08d7411
--- /dev/null
+++ b/API-SERVICE/ELKSearch/Utils/database_utils.py
@@ -0,0 +1,104 @@
+import argparse
+import configparser
+from psycopg2 import pool
+from typing import List, Dict, Tuple, Any
+from ELKSearch.Manager.manager import ElasticSearchManager
+
+
+class ElsSearchConfig:
+ root_path: str
+ category: str
+
+ db_type: str
+ db_info: Dict
+
+ els_type: str
+ els_info: Dict
+ check: bool
+
+ conn_pool: pool.SimpleConnectionPool
+ es: ElasticSearchManager
+
+
+config = ElsSearchConfig
+
+
+def get_config(root_path, config_name: str):
+ ano_cfg = {}
+
+ conf = configparser.ConfigParser()
+ config_path = root_path + f"/ELKSearch/conf/{config_name}"
+ conf.read(config_path, encoding="utf-8")
+ for section in conf.sections():
+ ano_cfg[section] = {}
+ for option in conf.options(section):
+ ano_cfg[section][option] = conf.get(section, option)
+
+ return ano_cfg
+
+
+def parser_params() -> Any:
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--category", default="local")
+ parser.add_argument("--db_type", default="local")
+ parser.add_argument("--check", default="True")
+
+ return parser.parse_args()
+
+
+def prepare_config(root_path) -> None:
+ args = parser_params()
+ config.root_path = root_path
+ config.category = args.category
+
+ db_config = get_config(root_path, "db_config.ini")
+ els_config = get_config(root_path, "config.ini")
+
+ config.els_type = args.category
+ config.els_info = els_config[args.category]
+ config.es = ElasticSearchManager(**config.els_info)
+ config.check = args.check
+
+ config.db_type = f"{args.db_type}_db"
+ config.db_info = db_config[config.db_type]
+ config.conn_pool = make_connection_pool(config.db_info)
+
+
+def make_connection_pool(db_info):
+ conn_pool = pool.SimpleConnectionPool(
+ 1,
+ 20,
+ user=db_info["user"],
+ password=db_info["password"],
+ host=db_info["host"],
+ port=db_info["port"],
+ database=db_info["database"],
+ options=f'-c search_path={db_info["schema"]}',
+ connect_timeout=10,
+ )
+ return conn_pool
+
+
+def connect_db():
+ conn = config.conn_pool.getconn()
+ return conn
+
+
+def execute(conn, cursor, sql) -> None:
+ cursor.execute(sql)
+ conn.commit()
+
+
+def select(conn, sql: str, count: int = None) -> Tuple[List[Dict[Any, Any]], List[Any]]:
+ cursor = conn.cursor()
+ execute(conn, cursor, sql)
+ column_names = [desc[0] for desc in cursor.description]
+ if count is None:
+ rows = cursor.fetchall()
+ else:
+ rows = cursor.fetchmany(count)
+
+ result = []
+ for row in rows:
+ result.append(dict(zip(column_names, row)))
+ return result, column_names
diff --git a/API-SERVICE/ELKSearch/Utils/elasticsearch_utils.py b/API-SERVICE/ELKSearch/Utils/elasticsearch_utils.py
new file mode 100644
index 00000000..e7035597
--- /dev/null
+++ b/API-SERVICE/ELKSearch/Utils/elasticsearch_utils.py
@@ -0,0 +1,42 @@
+from typing import Dict, Any
+
+
def is_space(text: str) -> int:
    """Return 1 when *text* contains a space character, else 0."""
    return int(" " in text)
+
+
def make_query(operator, field, value) -> Dict[Any, Any]:
    """Build a single-clause query: ``{operator: {field: value}}``."""
    return {operator: {field: value}}


def base_search_query(action: str, sub_action: str, item_list: list) -> Dict:
    """Build a bool-clause list (e.g. "must"/"filter") for a search body.

    Each item must expose ``keywords`` (list of terms), ``field`` (list of
    target fields) and ``operator``.  Items with no keywords are skipped.
    (Original ended the loop with a dead ``else: continue`` — removed.)
    """
    item_dict = {sub_action: []}

    for item in item_list:
        if not item.keywords:
            # Nothing to search for — skip this criterion.
            continue
        words = " ".join(item.keywords).strip()

        if len(item.field) > 1:
            # Several target fields: multi_match with phrase-prefix typing.
            query = make_query("multi_match", action, words)
            query["multi_match"].update(
                {
                    "fields": item.field,
                    "operator": item.operator,
                    "type": "phrase_prefix",
                }
            )
        else:
            # Single field: plain match query with explicit operator.
            detail = {action: words, "operator": item.operator}
            query = make_query("match", item.field[0], detail)
        item_dict[sub_action].append(query)
    return item_dict
diff --git a/API-SERVICE/ELKSearch/Utils/model.py b/API-SERVICE/ELKSearch/Utils/model.py
new file mode 100644
index 00000000..46f77852
--- /dev/null
+++ b/API-SERVICE/ELKSearch/Utils/model.py
@@ -0,0 +1,25 @@
+from pydantic import BaseModel, Field
+from typing import List, Union
+
+
class ConfigOption(BaseModel):
    # One search/filter criterion: target field(s), search terms and the
    # match operator — consumed by base_search_query().
    field: Union[list, str]
    keywords: list
    operator: str


class SortOption(BaseModel):
    # A single sort clause: field name plus order ("asc"/"desc").
    field: str
    order: str


class InputModel(BaseModel):
    # Request body for the metadata search endpoints.
    chk: bool = False  # when True, search keywords are appended to a log file
    u_id: str
    index: str = "biz_meta"
    from_: int = Field(1, alias="from")  # 1-based page start ("from" is reserved)
    size: int = 10
    resultField: list = []
    sortOption: List[SortOption] = []
    searchOption: List[ConfigOption] = []
    filterOption: List[ConfigOption] = []
diff --git a/API-SERVICE/ELKSearch/__init__.py b/API-SERVICE/ELKSearch/__init__.py
new file mode 100644
index 00000000..781be988
--- /dev/null
+++ b/API-SERVICE/ELKSearch/__init__.py
@@ -0,0 +1,2 @@
+from .Manager import *
+from .Utils import *
diff --git a/API-SERVICE/ELKSearch/conf/config.ini b/API-SERVICE/ELKSearch/conf/config.ini
new file mode 100644
index 00000000..19035ddd
--- /dev/null
+++ b/API-SERVICE/ELKSearch/conf/config.ini
@@ -0,0 +1,29 @@
+# API-SERVICE elasticsearch config
+[commercial]
+host = 10.217.59.133
+port = 9200
+
+[local]
+host = localhost
+port = 9200
+
+[test]
+host = 192.168.101.44
+port = 39200
+
+# els_update.py elasticsearch config
+[data]
+host = 192.168.101.44
+port = 39200
+index = kt_biz_data
+
+[assets]
+host = 192.168.101.44
+port = 39200
+index = kt_biz_asset
+
+# send_email.py email config
+[email]
+host = 14.63.245.51
+port = 25
+from_addr = DataOcean@kt.com
\ No newline at end of file
diff --git a/API-SERVICE/ELKSearch/conf/db_config.ini b/API-SERVICE/ELKSearch/conf/db_config.ini
new file mode 100644
index 00000000..1412bc7e
--- /dev/null
+++ b/API-SERVICE/ELKSearch/conf/db_config.ini
@@ -0,0 +1,32 @@
+[local_db]
+host = localhost
+port = 5432
+user = postgres
+password = 0312
+database = dataportal
+schema = meta
+
+[kt_db]
+host = 10.220.184.63
+port = 5432
+user = dpme
+password = 22DOCean.@!~
+database = ktportal
+schema = meta
+
+[test_db]
+host = 192.168.100.126
+port = 25432
+user = dpme
+password = hello.meta12#$
+database = ktportal
+schema = meta
+
+# send_email.py config
+[email_db]
+host = 192.168.100.126
+port = 25432
+user = dpme
+password = hello.meta12#$
+database = ktportal
+schema = sitemng
diff --git a/API-SERVICE/ELKSearch/conf/mapping.json b/API-SERVICE/ELKSearch/conf/mapping.json
new file mode 100644
index 00000000..ff2c2117
--- /dev/null
+++ b/API-SERVICE/ELKSearch/conf/mapping.json
@@ -0,0 +1,205 @@
+{
+ "settings": {
+ "queries.cache.enabled": "true",
+ "refresh_interval":"10s",
+ "max_shingle_diff": 10,
+ "analysis": {
+ "tokenizer": {
+ "nori_user_dic": {
+ "type": "nori_tokenizer",
+ "decompound_mode": "discard",
+ "user_dictionary": "user_dic.txt"
+ }
+ },
+ "filter": {
+ "nori_pos": {
+ "type": "nori_part_of_speech",
+ "stoptags": [
+ "E", "J", "SC", "SE", "SF", "SP", "SSC", "SSO", "SY", "VCN", "VCP", "VSV", "VX", "XPN", "XSA", "XSN", "XSV"
+ ]
+ },
+ "synonym": {
+ "type": "synonym_graph",
+ "synonyms_path": "synonyms.txt"
+ },
+ "stopwords": {
+ "type": "stop",
+ "stopwords_path": "stopwords.txt"
+ },
+ "shingle_ten": {
+ "type": "shingle",
+ "token_separator": "",
+ "max_shingle_size": 10
+ }
+ },
+ "analyzer": {
+ "korean_analyzer": {
+ "tokenizer": "nori_user_dic",
+ "filter": [
+ "nori_pos", "nori_readingform", "lowercase", "synonym", "stopwords", "remove_duplicates", "shingle_ten"
+ ]
+ }
+ }
+ }
+ },
+ "mappings": {
+ "properties": {
+ "biz_dataset_id": {
+ "type": "keyword"
+ },
+ "data_nm": {
+ "type": "text",
+ "fields": {
+ "korean_analyzer": {
+ "type": "text",
+ "analyzer": "korean_analyzer",
+ "search_analyzer": "standard"
+ }
+ },
+ "fielddata": true
+ },
+ "data_desc": {
+ "type": "text",
+ "fields": {
+ "korean_analyzer": {
+ "type": "text",
+ "analyzer": "korean_analyzer",
+ "search_analyzer": "standard"
+ }
+ }
+ },
+ "ctgry": {
+ "type": "text",
+ "fields": {
+ "korean_analyzer": {
+ "type": "text",
+ "analyzer": "korean_analyzer",
+ "search_analyzer": "standard"
+ }
+ }
+ },
+ "ctgry_id": {
+ "type": "keyword"
+ },
+ "src_url": {
+ "type": "text",
+ "fields": {
+ "korean_analyzer": {
+ "type": "text",
+ "analyzer": "korean_analyzer",
+ "search_analyzer": "standard"
+ }
+ }
+ },
+ "kywrd": {
+ "type": "text",
+ "fields": {
+ "korean_analyzer": {
+ "type": "text",
+ "analyzer": "korean_analyzer",
+ "search_analyzer": "standard"
+ }
+ }
+ },
+ "reg_date": {
+ "type": "date"
+ },
+ "recnt_amd_date": {
+ "type": "date"
+ },
+ "lnk_date": {
+ "type": "date"
+ },
+ "prv_forml": {
+ "type": "text",
+ "fields": {
+ "korean_analyzer": {
+ "type": "text",
+ "analyzer": "korean_analyzer",
+ "search_analyzer": "standard"
+ }
+ }
+ },
+ "data_eng_nm": {
+ "type": "text",
+ "fields": {
+ "korean_analyzer": {
+ "type": "text",
+ "analyzer": "korean_analyzer",
+ "search_analyzer": "standard"
+ }
+ }
+ },
+ "data_type": {
+ "type": "text",
+ "fields": {
+ "korean_analyzer": {
+ "type": "text",
+ "analyzer": "korean_analyzer",
+ "search_analyzer": "standard"
+ }
+ }
+ },
+ "data_clas": {
+ "type": "text",
+ "fields": {
+ "korean_analyzer": {
+ "type": "text",
+ "analyzer": "korean_analyzer",
+ "search_analyzer": "standard"
+ }
+ }
+ },
+ "law_review_ncst_yn": {
+ "type": "keyword"
+ },
+ "secur_review_ncst_yn": {
+ "type": "keyword"
+ },
+ "data_upd_cycl": {
+ "type": "keyword"
+ },
+ "tkcgr": {
+ "type": "keyword"
+ },
+ "tkcg_dept": {
+ "type": "keyword"
+ },
+ "rqtr": {
+ "type": "keyword"
+ },
+ "rqt_dept": {
+ "type": "keyword"
+ },
+ "retv_num": {
+ "type": "integer"
+ },
+ "intrst_data_num": {
+ "type": "integer"
+ },
+ "downl_num": {
+ "type": "integer"
+ },
+ "src_sys": {
+ "type": "text",
+ "fields": {
+ "korean_analyzer": {
+ "type": "text",
+ "analyzer": "korean_analyzer",
+ "search_analyzer": "standard"
+ }
+ }
+ },
+ "file_size": {
+ "type": "text",
+ "fields": {
+ "korean_analyzer": {
+ "type": "text",
+ "analyzer": "korean_analyzer",
+ "search_analyzer": "standard"
+ }
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/API-SERVICE/ELKSearch/conf/template/exTemplate.html b/API-SERVICE/ELKSearch/conf/template/exTemplate.html
new file mode 100644
index 00000000..b6e856ec
--- /dev/null
+++ b/API-SERVICE/ELKSearch/conf/template/exTemplate.html
@@ -0,0 +1,41 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ |
+ TITLE
+
+ |
+
+
+ |
+
+ |
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/API-SERVICE/ServiceApiList/__init__.py b/API-SERVICE/ServiceApiList/__init__.py
new file mode 100644
index 00000000..4e889787
--- /dev/null
+++ b/API-SERVICE/ServiceApiList/__init__.py
@@ -0,0 +1,2 @@
+from .meta import *
+from .common import *
diff --git a/API-SERVICE/ServiceApiList/common/__init__.py b/API-SERVICE/ServiceApiList/common/__init__.py
new file mode 100644
index 00000000..b6e690fd
--- /dev/null
+++ b/API-SERVICE/ServiceApiList/common/__init__.py
@@ -0,0 +1 @@
+from . import *
diff --git a/API-SERVICE/ServiceApiList/common/commonExecute.py b/API-SERVICE/ServiceApiList/common/commonExecute.py
new file mode 100644
index 00000000..c6109ced
--- /dev/null
+++ b/API-SERVICE/ServiceApiList/common/commonExecute.py
@@ -0,0 +1,67 @@
+from typing import Dict, List, Optional
+from pydantic import BaseModel
+from ServiceUtils.CommonUtil import connect_db, get_exception_info, convert_data
+
+
class commonExecute(BaseModel):
    # One DML request: method is "INSERT" | "UPDATE" | "DELETE",
    # data maps column -> value, key lists the columns used in the
    # WHERE clause for UPDATE/DELETE.
    method: str
    table_nm: str
    data: Dict
    key: Optional[List[str]] = None
+
+
def make_insert_query(excute: commonExecute):
    """Render an INSERT statement from the request's column/value mapping."""
    cols = ", ".join(excute.data.keys())
    vals = ", ".join(convert_data(v) for v in excute.data.values())
    return f"INSERT INTO {excute.table_nm} ({cols}) VALUES ({vals});"
+
+
def make_update_query(excute: commonExecute):
    """Render an UPDATE; WHERE matches the key columns against their values."""
    conditions = []
    update_data = [
        f"{col} = {convert_data(val)}" for col, val in excute.data.items()
    ]
    for col in excute.key:
        conditions.append(f"{col} = {convert_data(excute.data.get(col))}")
    where = conditions
    return f'UPDATE {excute.table_nm} SET {",".join(update_data)}\
        WHERE {" AND ".join(where)};'
+
+
def make_delete_query(excute: commonExecute):
    """Render a DELETE statement keyed on the request's key columns."""
    conditions = " AND ".join(
        f"{col} = {convert_data(excute.data.get(col))}" for col in excute.key
    )
    return f"DELETE FROM {excute.table_nm} WHERE {conditions};"
+
+
def make_execute_query(excute: commonExecute):
    """Dispatch to the query builder matching ``excute.method``.

    Raises:
        ValueError: for any method other than INSERT/UPDATE/DELETE.
    """
    method = excute.method
    if method == "INSERT":
        return make_insert_query(excute)
    if method == "UPDATE":
        return make_update_query(excute)
    if method == "DELETE":
        return make_delete_query(excute)
    # Original message had an unbalanced trailing ")" — fixed.
    raise ValueError(f"Invalid Method. ({method})")
+
+
def api(excute_list: List[commonExecute]) -> Dict:
    """Translate the requests into SQL and execute them as one batch.

    Returns {"result": 1, "errorMessage": ""} on success or
    {"result": 0, "errorMessage": <exception info>} on any failure.
    """
    try:
        queries = [make_execute_query(excute) for excute in excute_list]

        db = connect_db()
        time_zone = "Asia/Seoul"
        # Timestamps must be rendered in KST before execution.
        db.execute(f"SET TIMEZONE={convert_data(time_zone)}")
        db.multiple_excute(queries)
    except Exception:
        result = {"result": 0, "errorMessage": get_exception_info()}
    else:
        result = {"result": 1, "errorMessage": ""}

    return result
diff --git a/API-SERVICE/ServiceApiList/common/commonLogout.py b/API-SERVICE/ServiceApiList/common/commonLogout.py
new file mode 100644
index 00000000..21b28da5
--- /dev/null
+++ b/API-SERVICE/ServiceApiList/common/commonLogout.py
@@ -0,0 +1,27 @@
+from typing import Dict
+from fastapi.logger import logger
+from fastapi.responses import JSONResponse
+from jose import jwt
+from starlette.requests import Request
+
+from ServiceUtils import CommonUtil as utils
+from ApiService.ApiServiceConfig import config
+
+
def api(request: Request) -> Dict:
    """Log the user out by expiring the auth cookies.

    Cookies are removed even when token validation fails: leaving a stale
    token in place makes follow-up requests fail on a None token.
    """
    f_delete = True
    try:
        payload = utils.jwt_decode(utils.get_token_from_cookie(request))
        user = utils.get_user_info(payload)
    except Exception:
        result = {"result": 0, "errorMessage": utils.get_exception_info()}
    else:
        result = {"result": 1, "errorMessage": ""}
    response = JSONResponse(content=result)
    if f_delete:
        response.delete_cookie(key=config.secret_info["cookie_name"])
        response.delete_cookie(key=config.secret_info["knime_cookie_name"])
    return response
diff --git a/API-SERVICE/ServiceApiList/common/commonSelect.py b/API-SERVICE/ServiceApiList/common/commonSelect.py
new file mode 100644
index 00000000..e085a907
--- /dev/null
+++ b/API-SERVICE/ServiceApiList/common/commonSelect.py
@@ -0,0 +1,149 @@
+from typing import Dict, List, Optional
+from pydantic import BaseModel
+from fastapi.logger import logger
+from fastapi.requests import Request
+
+from ApiService.ApiServiceConfig import config
+from ServiceUtils import CommonUtil as utils
+
+
class joinInfo(BaseModel):
    # JOIN target: join `table_nm` on its `key` column.
    table_nm: str
    key: str


class subWhereInfo(BaseModel):
    # Nested condition inside a parenthesised WHERE group; `op` is the
    # boolean connective (AND/OR) preceding it, `compare_op` a label
    # understood by convert_compare_op().
    table_nm: str
    key: str
    value: str
    compare_op: str
    op: Optional[str] = ""


class whereInfo(BaseModel):
    # Top-level WHERE condition; when `sub` is set, the condition and its
    # sub-conditions are rendered inside one pair of parentheses.
    table_nm: str
    key: str
    value: str
    compare_op: str
    op: Optional[str] = ""
    sub: Optional[List[subWhereInfo]] = None


class orderInfo(BaseModel):
    # ORDER BY clause: table, column and direction.
    table_nm: str
    key: str
    order: str


class pageInfo(BaseModel):
    # Pagination: page size and 1-based current page.
    per_page: int
    cur_page: int


class commonSelect(BaseModel):
    # Full request for the generic SELECT endpoint; every section is
    # optional except the base table name.
    table_nm: str
    key: Optional[str] = None
    join_info: Optional[joinInfo] = None
    where_info: Optional[List[whereInfo]] = None
    order_info: Optional[orderInfo] = None
    page_info: Optional[pageInfo] = None
+
+
def convert_compare_op(compare_str):
    """Map a UI comparison label to its SQL operator.

    Unrecognised labels (e.g. "IN", "LIKE", "is") pass through unchanged,
    matching the original if/elif fall-through behavior.
    """
    return {
        "Equal": "=",
        "Not Equal": "!=",
        "Greater Than": ">",
        "Greater Than or Equal": ">=",
        "Less Than": "<",
        "Less Than or Equal": "<=",
    }.get(compare_str, compare_str)
+
+
def make_where_value(where):
    """Render the right-hand side of a WHERE comparison for *where*."""
    if where.compare_op in ("IN", "NOT IN"):
        # IN-lists: quote each comma-separated element, wrap in parens.
        value_list = ", ".join(utils.convert_data(v) for v in where.value.split(","))
        return f"( {value_list} )"
    if where.compare_op in ["is", "is not"]:
        # IS [NOT] NULL etc. — value used verbatim.
        return where.value
    return utils.convert_data(where.value)
+
+
def make_where_info(where_info: List[whereInfo]):
    """Assemble the WHERE clause; each entry's ``op`` (AND/OR) glues it on.

    Entries carrying ``sub`` conditions are rendered inside parentheses.
    """
    clause = ""
    for cond in where_info:
        rendered = make_where_value(cond)
        if cond.sub:
            grouped = f"{cond.table_nm}.{cond.key} {convert_compare_op(cond.compare_op)} {rendered}"
            for sub_cond in cond.sub:
                sub_rendered = make_where_value(sub_cond)
                grouped = f"{grouped} {sub_cond.op} {sub_cond.table_nm}.{sub_cond.key} {convert_compare_op(sub_cond.compare_op)} {sub_rendered}"
            clause = f"{clause} {cond.op} ({grouped})"
        else:
            clause = f"{clause} {cond.op} {cond.table_nm}.{cond.key} {convert_compare_op(cond.compare_op)} {rendered}"
    return f"WHERE {clause}"
+
+
def make_select_query(select_info: commonSelect):
    """Build the data query and its COUNT twin.

    Returns (select_query, count_query); the count query omits ORDER BY
    and pagination so it reflects the full result set.
    """
    join = where = order = page = ""

    if select_info.join_info:
        ji = select_info.join_info
        join = f"JOIN {ji.table_nm} ON {select_info.table_nm}.{select_info.key} = {ji.table_nm}.{ji.key}"
    if select_info.where_info:
        where = make_where_info(select_info.where_info)
    if select_info.order_info:
        oi = select_info.order_info
        order = f"ORDER BY {oi.table_nm}.{oi.key} {oi.order}"
    if select_info.page_info:
        pi = select_info.page_info
        page = f"LIMIT {pi.per_page} OFFSET ({pi.per_page} * {pi.cur_page - 1})"

    select_query = f"SELECT * FROM {select_info.table_nm} {join} {where} {order} {page};"
    count_query = f"SELECT count(*) FROM {select_info.table_nm} {join} {where};"

    return select_query, count_query
+
+
def api(select_info: commonSelect, request: Request) -> Dict:
    """Generic table SELECT endpoint with optional join/where/order/paging.

    Access to user_bas requires a SITE_ADMIN token.  NOTE(review): the
    token is read *outside* the try block, so a missing/invalid cookie
    raises instead of returning an error payload — confirm intended.
    """
    if select_info.table_nm == "user_bas":
        token = utils.get_token_from_cookie(request)
        payload = utils.jwt_decode(token)
        user_type = payload["user_type"]

        if not user_type == "SITE_ADMIN":
            return {"result": 0, "errorMessage": "not allowed user"}

    # Korean display names for the selected table's columns.
    get_column_info = f"SELECT eng_nm, kor_nm FROM tbl_item_coln_dtl \
        WHERE tbl_id = (SELECT tbl_id FROM tbl_item_bas WHERE tbl_nm = {utils.convert_data(select_info.table_nm)});"
    get_query, total_cnt_query = make_select_query(select_info)
    logger.info(f"Get Query : {get_query}")

    try:
        db = utils.connect_db()
        select_data, _ = db.select(get_query)
        if select_info.page_info:
            # NOTE(review): unlike the other calls here, the (rows, columns)
            # tuple is NOT unpacked; total_cnt[0][0] below depends on that
            # shape — verify against db.select's return contract.
            total_cnt = db.select(total_cnt_query)
    except Exception:
        except_name = utils.get_exception_info()
        result = {"result": 0, "errorMessage": except_name}
    else:
        column_info, _ = db.select(get_column_info)
        kor_nm_list = [map_data["kor_nm"] for map_data in column_info]
        eng_nm_list = [map_data["eng_nm"] for map_data in column_info]
        result = utils.make_res_msg(1, "", select_data, eng_nm_list, kor_nm_list)
        if select_info.page_info:
            result["data"].update(total_cnt[0][0])

    return result
diff --git a/API-SERVICE/ServiceApiList/common/commonToken.py b/API-SERVICE/ServiceApiList/common/commonToken.py
new file mode 100644
index 00000000..39d346e7
--- /dev/null
+++ b/API-SERVICE/ServiceApiList/common/commonToken.py
@@ -0,0 +1,39 @@
+from typing import Dict
+from fastapi.logger import logger
+from fastapi.responses import JSONResponse
+from datetime import timedelta
+from jose import jwt
+from ServiceUtils import CommonUtil as utils
+from ServiceUtils.exceptions import TokenDoesNotExist, InvalidUserInfo
+from ApiService.ApiServiceConfig import config
+from starlette.requests import Request
+
+
def api(request: Request) -> Dict:
    """Refresh the session: re-issue the JWT cookie from the current one.

    NOTE(review): when validation fails, the cookie is still set — with an
    empty value — which effectively clears the session; confirm intended.
    """
    access_token = ""
    try:
        token = utils.get_token_from_cookie(request)
        payload = utils.jwt_decode(token)
        user = utils.get_user_info(payload)
    except Exception:
        except_name = utils.get_exception_info()
        result = {"result": 0, "errorMessage": except_name}
    else:
        # Re-mint the token with a fresh expiry from config.
        token_data = utils.make_token_data(user)
        access_token = utils.create_token(
            data=token_data,
            expires_delta=timedelta(minutes=int(config.secret_info["expire_min"])),
            secret_key=config.secret_info["secret_key"],
            algorithm=config.secret_info["algorithm"],
        )
        result = {"result": 1, "errorMessage": ""}

    response = JSONResponse(content=result)
    response.set_cookie(
        key=config.secret_info["cookie_name"],
        value=access_token,
        max_age=3600,
        secure=False,
        httponly=True,
    )
    return response
diff --git a/API-SERVICE/ServiceApiList/common/commonUserInfo.py b/API-SERVICE/ServiceApiList/common/commonUserInfo.py
new file mode 100644
index 00000000..6548da09
--- /dev/null
+++ b/API-SERVICE/ServiceApiList/common/commonUserInfo.py
@@ -0,0 +1,19 @@
+from typing import Dict
+from fastapi.logger import logger
+from ServiceUtils import CommonUtil as utils
+from starlette.requests import Request
+
+
def api(request: Request) -> Dict:
    """Return the decoded JWT payload for the current session cookie."""
    try:
        payload = utils.jwt_decode(utils.get_token_from_cookie(request))
        user = utils.get_user_info(payload)
        logger.info(f"CommonUserInfo :: {user}")
    except Exception:
        return {"result": 0, "errorMessage": utils.get_exception_info()}
    return {"result": 1, "errorMessage": "", "data": {"body": payload}}
diff --git a/API-SERVICE/ServiceApiList/common/ldap/__init__.py b/API-SERVICE/ServiceApiList/common/ldap/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/API-SERVICE/ServiceApiList/common/ldap/checkOTP.py b/API-SERVICE/ServiceApiList/common/ldap/checkOTP.py
new file mode 100644
index 00000000..61bdc8b6
--- /dev/null
+++ b/API-SERVICE/ServiceApiList/common/ldap/checkOTP.py
@@ -0,0 +1,52 @@
+from fastapi.logger import logger
+from fastapi.responses import JSONResponse
+from datetime import timedelta
+
+from ServiceUtils.CommonUtil import (
+ make_token_data,
+ create_token,
+ connect_db,
+ convert_data,
+)
+from .otp_store import OTP
+from ServiceUtils.exceptions import InvalidUserInfo
+from ApiService.ApiServiceConfig import config
+from .schemas import TmpAuthUserBas
+
+
class OTPMissMatch(Exception):
    """Raised when the submitted OTP does not match the stored one."""
+
+
def api(id: str, otp: str):
    """Verify the OTP for *id*; on success issue the JWT session cookie.

    Returns a JSONResponse carrying {"result": 1|0, "errorMessage": ...}.
    """
    try:
        # check_otp() consumes the stored OTP on a successful match.
        is_ok = OTP.check_otp(id, otp)
        if not is_ok:
            raise OTPMissMatch(f"invalid OTP :: {otp}")

        db = connect_db()
        user_info, _ = db.select(f"SELECT * FROM user_bas WHERE emp_id = {convert_data(id)};")
        if not user_info:
            raise InvalidUserInfo
        user_info = user_info[0]
        # by_alias=True keeps the wire names (e.g. "cmpno") in the token.
        token_data = make_token_data(TmpAuthUserBas(**user_info).dict(by_alias=True))
        access_token = create_token(
            data=token_data,
            expires_delta=timedelta(minutes=int(config.secret_info["expire_min"])),
            secret_key=config.secret_info["secret_key"],
            algorithm=config.secret_info["algorithm"],
        )

        response = JSONResponse(content={"result": 1, "errorMessage": ""})
        response.set_cookie(
            key=config.secret_info["cookie_name"],
            value=access_token,
            max_age=3600,
            secure=False,
            httponly=True,
        )
    except Exception as e:
        logger.error(e)
        response = JSONResponse(content={"result": 0, "errorMessage": str(e)})

    return response
diff --git a/API-SERVICE/ServiceApiList/common/ldap/createOTP.py b/API-SERVICE/ServiceApiList/common/ldap/createOTP.py
new file mode 100644
index 00000000..48b62e5e
--- /dev/null
+++ b/API-SERVICE/ServiceApiList/common/ldap/createOTP.py
@@ -0,0 +1,45 @@
+from fastapi.responses import JSONResponse
+from fastapi.requests import Request
+from fastapi.logger import logger
+
+from .otp_store import OTP
+from .utils import ldap_info
+from ApiService.ApiServiceConfig import config
+from ServiceUtils.CommonUtil import knime_decrpyt
+from ServiceUtils.exceptions import TokenDoesNotExist, InvalidUserInfo
+
+
async def api(id: str, request: Request):
    """Create a 6-digit OTP for *id* and (eventually) SMS it to the user.

    The caller must already hold a valid knime auth cookie containing
    *id*; otherwise TokenDoesNotExist/InvalidUserInfo is raised and an
    error payload is returned.
    """
    try:
        if config.secret_info["knime_cookie_name"] in request.cookies:
            token = request.cookies[config.secret_info["knime_cookie_name"]]
            # NOTE(review): helper name is misspelled ("decrpyt") in
            # ServiceUtils — kept as-is to match the import.
            data = knime_decrpyt(token)
            if id not in data:
                raise InvalidUserInfo(f"user {id} not authenticate")
        else:
            raise TokenDoesNotExist("TokenDoesNotExist")

        ldap_user_info = await ldap_info(id)
        mobile = ldap_user_info.mobile
        otp = OTP.create()
        OTP.add_otp(id, otp)

        # sms to mobile
        # TODO: ldap insert to table
        """
        insert into sdk_sms_send
        (user_id, schedule_type, subject, sms_msg, callback_url, now_date, send_date, callback, dest_info, reserved1, reserved2)
        values
        ('ktsaup0519', '0', 'otp', '[사내접근제어 DOCEAN] 인증번호 [123456] 입니다. 3분안에 입력해주세요.', null,
        date_format(now(), '%Y%m%d%H%i%S'), date_format(now()+2, '%Y%m%d%H%i%S'),
        '15883391', '양석우', '01099858980', '991456', '91312828');
        """

        logger.info(f"CREATE OTP :: {otp}")
        # TODO: remove the "otp" field from the response (before the production patch)
        response = JSONResponse(content={"result": 1, "errorMessage": "", "otp": otp})
    except Exception as e:
        logger.error(e)
        response = JSONResponse(content={"result": 0, "errorMessage": str(e)})

    return response
diff --git a/API-SERVICE/ServiceApiList/common/ldap/info.py b/API-SERVICE/ServiceApiList/common/ldap/info.py
new file mode 100644
index 00000000..6688faa6
--- /dev/null
+++ b/API-SERVICE/ServiceApiList/common/ldap/info.py
@@ -0,0 +1,18 @@
+from fastapi.requests import Request
+from fastapi.logger import logger
+from fastapi.responses import JSONResponse
+
+from ServiceUtils import CommonUtil as utils
+from .utils import ldap_info
+
+
async def api(request: Request):
    """Return LDAP profile details for the logged-in user.

    Fix: the original passed the raw ``Request`` straight to
    ``utils.get_user_info``; every sibling endpoint (commonUserInfo,
    commonLogout) first extracts the cookie token and decodes the JWT
    payload, and ``get_user_info`` is called with that payload there.
    """
    try:
        token = utils.get_token_from_cookie(request)
        payload = utils.jwt_decode(token)
        user = utils.get_user_info(payload)
        ldap_user_info = await ldap_info(user["cmpno"])

        response = JSONResponse(content={"result": 1, "errorMessage": "", "data": {"body": ldap_user_info.dict()}})
    except Exception as e:
        logger.error(e)
        response = JSONResponse(content={"result": 0, "errorMessage": str(e)})
    return response
diff --git a/API-SERVICE/ServiceApiList/common/ldap/login.py b/API-SERVICE/ServiceApiList/common/ldap/login.py
new file mode 100644
index 00000000..252fc0d3
--- /dev/null
+++ b/API-SERVICE/ServiceApiList/common/ldap/login.py
@@ -0,0 +1,35 @@
+from fastapi.requests import Request
+from fastapi.responses import JSONResponse
+from fastapi.logger import logger
+
+from .utils import ldap_auth, ldap_info
+from ServiceUtils.CommonUtil import kt_lamp, knime_encrypt
+from ApiService.ApiServiceConfig import config
+from .schemas import LoginInfo
+
+
+# login
async def api(info: LoginInfo, request: Request):
    """LDAP login: authenticate, fetch the profile, set the knime cookie.

    NOTE(review): ldap_auth() in utils either returns True or raises, so
    the success branch always runs; if it could ever return False,
    ``response`` would be unbound at the final return — confirm.
    """
    transaction_id = request.headers.get("transactionId")
    kt_lamp("OUT_REQ", transaction_id, "userLogin")

    try:
        if await ldap_auth(info.id, info.password):
            ldap_user_info = await ldap_info(info.id)

            # "id|^|password" blob that knime later decrypts to re-auth.
            knime_token = knime_encrypt(info.id + "|^|" + info.password)

            response = JSONResponse(content={"result": 1, "errorMessage": "", "data": {"body": ldap_user_info.dict()}})
            response.set_cookie(
                key=config.secret_info["knime_cookie_name"],
                value=knime_token,
                max_age=3600,
                secure=False,
                httponly=True,
            )
    except Exception as e:
        logger.error(e)
        response = JSONResponse(content={"result": 0, "errorMessage": str(e)})

    kt_lamp("OUT_RES", transaction_id, "userLogin", res_desc=f"{info.id}")
    return response
diff --git a/API-SERVICE/ServiceApiList/common/ldap/otp_store.py b/API-SERVICE/ServiceApiList/common/ldap/otp_store.py
new file mode 100644
index 00000000..2a0e983e
--- /dev/null
+++ b/API-SERVICE/ServiceApiList/common/ldap/otp_store.py
@@ -0,0 +1,45 @@
+import hashlib
+import time
+import threading
+import random
+from fastapi.logger import logger
+
+
class OTP:
    """In-memory one-time-password store (per-process, not shared)."""

    # user id -> sha256 hex digest of the currently active OTP
    otp_db = dict()

    @classmethod
    def get_hash(cls, data) -> str:
        """Return the sha256 hex digest of ``str(data)``."""
        return hashlib.sha256(str(data).encode()).hexdigest()

    @classmethod
    def create(cls):
        """Generate a 6-digit OTP string.

        Fix: uses ``secrets`` (CSPRNG) instead of ``random`` — OTPs are
        security sensitive and must not come from a predictable PRNG.
        """
        import secrets  # stdlib; local import keeps module imports unchanged

        return "".join(str(secrets.randbelow(10)) for _ in range(6))

    @classmethod
    def add_otp(cls, id: str, otp: str):
        """Store the OTP's hash for *id* and expire it after 180 seconds."""

        def del_expired_otp(otp_db, id):
            # Daemon thread: sleeps out the TTL, then drops the entry if
            # it has not been consumed by check_otp() in the meantime.
            time.sleep(180)
            if id in otp_db:
                del otp_db[id]
                logger.info(f"expired otp :: {id}")

        cls.otp_db[id] = cls.get_hash(otp)
        logger.info(f"otp_store :: {cls.otp_db}")
        threading.Thread(
            daemon=True,
            target=del_expired_otp,
            args=(
                cls.otp_db,
                id,
            ),
        ).start()

    @classmethod
    def check_otp(cls, id: str, otp: str) -> bool:
        """Consume-and-verify: a successful check removes the stored OTP."""
        digest = cls.get_hash(otp)  # renamed from "hash" (shadowed builtin)
        if id in cls.otp_db and cls.otp_db[id] == digest:
            del cls.otp_db[id]
            return True
        return False
diff --git a/API-SERVICE/ServiceApiList/common/ldap/schemas.py b/API-SERVICE/ServiceApiList/common/ldap/schemas.py
new file mode 100644
index 00000000..8f154e2b
--- /dev/null
+++ b/API-SERVICE/ServiceApiList/common/ldap/schemas.py
@@ -0,0 +1,54 @@
+from datetime import datetime
+from pydantic import Field, BaseModel
+from typing import Optional, Union
+
+
class LoginInfo(BaseModel):
    # Credentials posted to the LDAP login endpoint; `cmpno` (8-char
    # employee number) is the wire name for `id`.
    id: str = Field(alias="cmpno", min_length=8, max_length=8)
    password: str = Field(default=None)

    class Config:
        # Keep the password out of .dict()/.json() output.
        fields = {"password": {"exclude": True}}


class UserBas(LoginInfo):
    # Mirrors the user_bas table columns used when issuing tokens.
    user_id: str
    emp_id: str
    user_nm: str
    email: str
    dept_nm: str
    innt_aut_group_cd: Optional[str] = "ROLE_USER"
    sttus: Optional[str] = "SBSC"
    user_type: str


class TmpAuthUserBas(UserBas):
    # Temporary-authority columns; all optional/NULL by default.
    tmp_aut_group_cd: Optional[str] = None
    tmp_aut_alc_user: Optional[str] = None
    tmp_aut_alc_date: Optional[datetime] = None
    tmp_aut_exp_date: Optional[datetime] = None
+
+
class LdapUserInfo(BaseModel):
    """User profile returned by the LDAP ``UINFO`` command.

    Fix: the original class body contained a bare dict literal (an example
    payload) that was evaluated and discarded at class-creation time; it
    is preserved here as documentation instead.

    Example response::

        {
            "userName": "홍길동",
            "deptCD": 481253,
            "mobile": "010-6290-5249",
            "deptName": "",
            "agencyCD": 481226,
            "agencyName": "",
            "positionCD": "",
            "positionName": "AI/BigData사업본부...",
            "companyName": "KT협력사",
            "email": "9132824@ktfriends.com",
        }
    """

    user_name: str = Field(alias="userName")
    mobile: str
    dept_cd: int = Field(alias="deptCD")
    dept_name: str = Field(alias="deptName")
    agency_cd: int = Field(alias="agencyCD")
    agency_name: str = Field(alias="agencyName")
    position_cd: Union[int, str] = Field(alias="positionCD")
    position_name: str = Field(alias="positionName")
    company_name: str = Field(alias="companyName")
    email: str
diff --git a/API-SERVICE/ServiceApiList/common/ldap/utils.py b/API-SERVICE/ServiceApiList/common/ldap/utils.py
new file mode 100644
index 00000000..2cb1f753
--- /dev/null
+++ b/API-SERVICE/ServiceApiList/common/ldap/utils.py
@@ -0,0 +1,100 @@
+from fastapi.logger import logger
+from typing import Union
+import asyncssh
+
+from ServiceUtils.crypto import AESCipher
+from ServiceUtils.exceptions import InvalidUserInfo
+from ApiService.ApiServiceConfig import config
+from .schemas import LdapUserInfo
+
+
async def ldap_auth(id: str, pwd: str) -> bool:
    """Authenticate *id*/*pwd* via the remote AUTH command.

    Returns True on success; raises InvalidUserInfo when the remote side
    answers "false <reason>".
    """
    reply = await run_cmd(
        config.ldap_info["host"],
        int(config.ldap_info["port"]),
        config.ldap_info["user"],
        config.ldap_info["password"],
        f"AUTH {id} {pwd}",
    )
    parts = reply.split(" ", 1)
    if parts[0] == "false":
        raise InvalidUserInfo(parts[1])
    return True
+
+
async def ldap_info(id: str) -> LdapUserInfo:
    """Fetch the LDAP profile for *id*; raise InvalidUserInfo on failure.

    The remote UINFO command returns a dict on success or an error string
    ("false <reason>") on failure.
    """
    output = await run_cmd(
        config.ldap_info["host"],
        int(config.ldap_info["port"]),
        config.ldap_info["user"],
        config.ldap_info["password"],
        f"UINFO {id}",
    )
    # isinstance() instead of `type(output) == str`: the idiomatic check,
    # and it also covers str subclasses.
    if isinstance(output, str):
        raise InvalidUserInfo(output.split(" ", 1)[1])
    return LdapUserInfo(**output)
+
+
async def run_cmd(host: str, port: int, username: str, password: str, cmd: str) -> Union[str, dict]:
    """Development stub for the remote LDAP command channel.

    The real implementation (commented out below) would run *cmd* over SSH
    via asyncssh; this stub returns canned responses instead:
      - "AUTH ..."  -> randomly "true" or a "false ..." error string
      - "UINFO ..." -> a profile dict for known ids, else an error string
    """
    logger.info("remote call :: " + cmd)
    # Real transport, kept for reference until the dev stub is retired:
    # async with asyncssh.connect(host=host, port=port,
    #                             username=username, password=password, known_hosts=None) as conn:
    #     logger.info(f'Run Cmd : {cmd}')
    #     result = await conn.run(cmd, check=True)
    #     logger.info(f'Command Result : {result.stdout}')
    #     return result.stdout
    import random

    if "AUTH" in cmd:
        # Random outcome simulates auth success/failure during development.
        return random.choice(
            [
                "true",
                "false [SYS_FAIL] AuthenticationException: [comment: 인증에 실패했습니다., data 0005",
            ]
        )
    elif "UINFO" in cmd:
        try:
            # Canned profiles keyed by employee id (cmd = "UINFO <id>").
            return {
                "12345678": {
                    "userName": "홍길동",
                    "deptCD": 481253,
                    "mobile": "010-6290-5249",
                    "deptName": "",
                    "agencyCD": 481226,
                    "agencyName": "",
                    "positionCD": "",
                    "positionName": "AI/BigData사업본부...",
                    "companyName": "KT협력사",
                    "email": "9132824@ktfriends.com",
                },
                "11181059": {
                    "userName": "고길동",
                    "deptCD": 481253,
                    "mobile": "010-6290-5249",
                    "deptName": "",
                    "agencyCD": 481226,
                    "agencyName": "",
                    "positionCD": "",
                    "positionName": "AI/BigData사업본부...",
                    "companyName": "KT협력사",
                    "email": "9132824@ktfriends.com",
                },
                "11181344": {
                    "userName": "나길동",
                    "deptCD": 481253,
                    "mobile": "010-6290-5249",
                    "deptName": "",
                    "agencyCD": 481226,
                    "agencyName": "",
                    "positionCD": "",
                    "positionName": "AI/BigData사업본부...",
                    "companyName": "KT협력사",
                    "email": "9132824@ktfriends.com",
                },
            }[cmd.split(" ")[1]]
        except Exception:
            # Unknown id -> same error-string convention as the real channel.
            return "false CredentialException: [comment: 존재하지 않는 사용자 계정입니다., data 0001]"
+
+
def knime_encrypt(data: str, key: str):
    """AES-encrypt *data* with *key* and return the token as a str."""
    cipher = AESCipher(key)
    return cipher.encrypt(data).decode()
diff --git a/API-SERVICE/ServiceApiList/common/userLogin.py b/API-SERVICE/ServiceApiList/common/userLogin.py
new file mode 100644
index 00000000..9f87c09c
--- /dev/null
+++ b/API-SERVICE/ServiceApiList/common/userLogin.py
@@ -0,0 +1,128 @@
+from typing import Dict, Optional
+from pydantic import BaseModel
+from fastapi import Request
+from fastapi.logger import logger
+from fastapi.responses import JSONResponse
+from datetime import datetime, timedelta
+from ServiceUtils.CommonUtil import (
+ get_exception_info,
+ connect_db,
+ convert_data,
+ create_token,
+ make_token_data,
+ kt_lamp,
+)
+from ApiService.ApiServiceConfig import config
+from ServiceUtils.crypto import AESCipher
+
+
class userLogin(BaseModel):
    """Login request payload; sample dev/test accounts:

    {
        "user_id":"e2851973-2239-4a44-8feb-00d5a3fb23ef",
        "emp_id":"11181059",
        "cmpno":"11181059",
        "user_nm":"swyang",
        "email":"swyang",
        "dept_nm":"swyang",
        "user_type":"SITE_USER"
    },
    {
        "user_id":"0f25ac7d-abfe-41db-b6fb-6c1d929c95f8",
        "emp_id":"11181344",
        "cmpno":"11181344",
        "user_nm":"테스터",
        "email":"test@test.com",
        "dept_nm":"테스터부서",
        "user_type":"SITE_ADMIN"
    }
    """

    user_id: str
    password: str = "1234"  # dev default; excluded from .dict() via Config
    emp_id: str
    cmpno: str
    user_nm: str
    email: str
    dept_nm: str
    innt_aut_group_cd: Optional[str] = "ROLE_USER"
    sttus: Optional[str] = "SBSC"
    user_type: str

    class Config:
        # Never serialize the password back out.
        fields = {"password": {"exclude": True}}
+
+
class TmpAuthUser(userLogin):
    # Adds the temporary-authority columns carried on user_bas rows.
    tmp_aut_group_cd: Optional[str] = None
    tmp_aut_alc_user: Optional[str] = None
    tmp_aut_alc_date: Optional[datetime] = None
    tmp_aut_exp_date: Optional[datetime] = None
+
+
def make_insert_query(login: dict):
    """Render the user_bas INSERT for a first-time login.

    Fix: works on a copy so the caller's dict is not mutated (the original
    wrote reg_user/reg_date back into the argument).
    """
    row = dict(login)
    row["reg_user"] = row["user_id"]
    # NOTE(review): convert_data() presumably quotes this as the literal
    # string 'NOW()' rather than the SQL function — verify.
    row["reg_date"] = "NOW()"
    columns = ", ".join(row.keys())
    values = ", ".join(map(convert_data, row.values()))
    return f"INSERT INTO user_bas ({columns}) VALUES ({values});"
+
+
def api(login: userLogin, request: Request) -> Dict:
    """Login endpoint: upsert the user row, then issue the auth cookies.

    Returns a JSONResponse: {"result": 1, ...} with the JWT and knime
    cookies set, or {"result": 0, "errorMessage": ...} when DB work fails.

    Fix: access_token/knime_token are now pre-initialized (mirroring
    commonToken.api); previously they were unbound on the exception path
    and the set_cookie calls below raised NameError instead of returning
    the error payload.
    """
    transaction_id = request.headers.get("transactionId")
    kt_lamp("OUT_REQ", transaction_id, "userLogin")

    access_token = ""
    knime_token = ""
    try:
        db = connect_db()
        user_info, _ = db.select(f"SELECT * FROM user_bas WHERE emp_id = {convert_data(login.emp_id)};")
        if not user_info:
            # First login: insert the row with KST timestamps.
            time_zone = "Asia/Seoul"
            db.execute(f"SET TIMEZONE={convert_data(time_zone)}")
            login_query = make_insert_query(login.dict())
            db.execute(login_query)
            user_info = login.dict()
        else:
            user_info = user_info[0]
    except Exception:
        except_name = get_exception_info()
        result = {"result": 0, "errorMessage": except_name}
    else:
        token_data = make_token_data(TmpAuthUser(**user_info).dict())
        access_token = create_token(
            data=token_data,
            expires_delta=timedelta(minutes=int(config.secret_info["expire_min"])),
            secret_key=config.secret_info["secret_key"],
            algorithm=config.secret_info["algorithm"],
        )

        # "id|^|password" blob that knime later decrypts to re-auth.
        knime_token = knime_encrypt(
            login.user_id + "|^|" + login.password,
            config.secret_info["knime_secret_key"],
        )

        result = {"result": 1, "errorMessage": ""}

    response = JSONResponse(content=result)
    response.set_cookie(
        key=config.secret_info["cookie_name"],
        value=access_token,
        max_age=3600,
        secure=False,
        httponly=True,
    )
    response.set_cookie(
        key=config.secret_info["knime_cookie_name"],
        value=knime_token,
        max_age=3600,
        secure=False,
        httponly=True,
    )

    kt_lamp("OUT_RES", transaction_id, "userLogin", res_desc=f"{login.emp_id}")
    return response
+
+
def knime_encrypt(data: str, key: str):
    """Encrypt *data* with AES under *key*; returns the token as str."""
    encrypted = AESCipher(key).encrypt(data)
    return encrypted.decode()
diff --git a/API-SERVICE/ServiceApiList/meta/__init__.py b/API-SERVICE/ServiceApiList/meta/__init__.py
new file mode 100644
index 00000000..b6e690fd
--- /dev/null
+++ b/API-SERVICE/ServiceApiList/meta/__init__.py
@@ -0,0 +1 @@
+from . import *
diff --git a/API-SERVICE/ServiceApiList/meta/getBizMetaAsset.py b/API-SERVICE/ServiceApiList/meta/getBizMetaAsset.py
new file mode 100644
index 00000000..5238c1b2
--- /dev/null
+++ b/API-SERVICE/ServiceApiList/meta/getBizMetaAsset.py
@@ -0,0 +1,78 @@
+from copy import deepcopy
+from typing import Dict
+from datetime import datetime
+from ELKSearch.Manager.manager import ElasticSearchManager
+from ELKSearch.Utils.model import InputModel
+from ELKSearch.Utils.elasticsearch_utils import make_query, base_search_query
+from ELKSearch.Utils.database_utils import get_config
+from ServiceUtils.CommonUtil import get_exception_info
+from ApiService.ApiServiceConfig import config
+
+
def api(input: InputModel) -> Dict:
    """Search the business-asset Elasticsearch index and return paged hits plus tab counts.

    Counts placed in data: "A" (all matching assets), "C" (assets with
    conts_dataset_reg_yn == "Y"), "M" (same query on the meta index) and
    "totalCount" = A + C + M.
    """
    index = "kt_biz_asset"
    # Section name: config.db_type looks like "test_db"; [:-3] strips "_db".
    els_config = get_config(config.root_path, "config.ini")[config.db_type[:-3]]
    # Caller pages are 1-based; Elasticsearch pages are 0-based.
    from_ = input.from_ - 1
    data_dict = dict()

    try:
        # Append searched keywords to the daily search log (used elsewhere for ranking).
        if input.chk and len(input.searchOption):
            with open(f"{config.root_path}/log/{config.category}/{datetime.today().strftime('%Y%m%d')}_search.log","a") as fp:
                for search in input.searchOption:
                    fp.write(f"{str(search.keywords)}\n")

        es = ElasticSearchManager(page=from_, size=input.size,
                                  index=index, **els_config)
        es.set_sort(input.sortOption)  # NOTE(review): overwritten by set_sort(sort_list) below

        # Build the "must" clause; free-text fields go through the Korean
        # analyzer sub-field.
        action = "query"
        sub_action = "must"
        for item in input.searchOption:
            tmp = []
            for field in item.field:
                if field in ["data_nm", "data_desc"]:
                    col = field + ".korean_analyzer"
                else:
                    col = field
                tmp.append(col)
            item.field = tmp
        query_dict = base_search_query(action, sub_action, input.searchOption)

        # Exact-match constraints go into the "filter" clause.
        sub_action = "filter"
        item_dict = base_search_query(action, sub_action, input.filterOption)
        query_dict.update(item_dict)
        search_query = make_query(action, "bool", query_dict)
        es.body.update(search_query)

        sort_list = [{item.field: item.order} for item in input.sortOption]
        es.set_sort(sort_list)
        search_data = es.search(input.resultField)

        # "A": all matching assets. The count API rejects "sort", so drop it.
        body = deepcopy(es.body)
        del body["sort"]
        data_dict["A"] = es.conn.count(index="kt_biz_asset", body=body)["count"]

        # "C": the subset of assets also registered as content.
        data_type = make_query(
            "match", "conts_dataset_reg_yn", {"operator": "OR", "query": "Y"}
        )
        body["query"]["bool"]["filter"].append(data_type)
        data_dict["C"] = es.conn.count(index="kt_biz_asset", body=body)["count"]

        # "M": the same (unfiltered) query counted against the meta index.
        body = deepcopy(es.body)
        del body["sort"]
        data_dict["M"] = es.conn.count(index="kt_biz_data", body=body)["count"]

        # NOTE(review): C is a subset of A, so this total double-counts —
        # confirm the front end expects A + C + M.
        data_dict["totalCount"] = sum(data_dict.values())

    except Exception:
        except_name = get_exception_info()
        result = {"result": 0, "errorMessage": except_name}
    else:
        search_list = [data["_source"] for data in search_data["hits"]["hits"]]
        data_dict["searchList"] = search_list
        result = {"result": 1, "errorMessage": "", "data": data_dict}

    return result
diff --git a/API-SERVICE/ServiceApiList/meta/getBizMetaContent.py b/API-SERVICE/ServiceApiList/meta/getBizMetaContent.py
new file mode 100644
index 00000000..283d2b8d
--- /dev/null
+++ b/API-SERVICE/ServiceApiList/meta/getBizMetaContent.py
@@ -0,0 +1,77 @@
+from copy import deepcopy
+from typing import Dict
+from datetime import datetime
+from ELKSearch.Manager.manager import ElasticSearchManager
+from ELKSearch.Utils.model import InputModel
+from ELKSearch.Utils.elasticsearch_utils import make_query, base_search_query
+from ELKSearch.Utils.database_utils import get_config
+from ServiceUtils.CommonUtil import get_exception_info
+from ApiService.ApiServiceConfig import config
+
+
def api(input: InputModel) -> Dict:
    """Search content-registered assets (conts_dataset_reg_yn == "Y") with tab counts.

    Unlike getBizMetaAsset, the "Y" constraint is part of the main search
    query, so the returned hits contain only content-registered assets.
    """
    index = "kt_biz_asset"
    # Section name: config.db_type looks like "test_db"; [:-3] strips "_db".
    els_config = get_config(config.root_path, "config.ini")[config.db_type[:-3]]
    # Caller pages are 1-based; Elasticsearch pages are 0-based.
    from_ = input.from_ - 1
    data_dict = dict()

    try:
        # Append searched keywords to the daily search log.
        if input.chk and len(input.searchOption):
            with open(f"{config.root_path}/log/{config.category}/{datetime.today().strftime('%Y%m%d')}_search.log","a") as fp:
                for search in input.searchOption:
                    fp.write(f"{str(search.keywords)}\n")

        es = ElasticSearchManager(page=from_, size=input.size,
                                  index=index, **els_config)
        es.set_sort(input.sortOption)  # NOTE(review): overwritten by set_sort(sort_list) below

        # Build the "must" clause; free-text fields use the Korean analyzer.
        action = "query"
        sub_action = "must"
        for item in input.searchOption:
            tmp = []
            for field in item.field:
                if field in ["data_nm", "data_desc"]:
                    col = field + ".korean_analyzer"
                else:
                    col = field
                tmp.append(col)
            item.field = tmp
        query_dict = base_search_query(action, sub_action, input.searchOption)

        sub_action = "filter"
        item_dict = base_search_query(action, sub_action, input.filterOption)
        query_dict.update(item_dict)
        search_query = make_query(action, "bool", query_dict)
        es.body.update(search_query)
        # Restrict the main query to content-registered assets.
        data_type = make_query(
            "match", "conts_dataset_reg_yn", {"operator": "OR", "query": "Y"}
        )
        es.body["query"]["bool"]["filter"].append(data_type)

        sort_list = [{item.field: item.order} for item in input.sortOption]
        es.set_sort(sort_list)
        search_data = es.search(input.resultField)

        # "C": content-registered assets (full filter). Count API rejects "sort".
        body = deepcopy(es.body)
        del body["sort"]
        data_dict["C"] = es.conn.count(index="kt_biz_asset", body=body)["count"]

        # "A": all assets — drop the "Y" clause (appended last) again.
        body["query"]["bool"]["filter"] = body["query"]["bool"]["filter"][:-1]
        data_dict["A"] = es.conn.count(index="kt_biz_asset", body=body)["count"]

        # "M": the same un-restricted query counted against the meta index.
        data_dict["M"] = es.conn.count(index="kt_biz_data", body=body)["count"]

        # NOTE(review): C is a subset of A, so this total double-counts —
        # confirm the front end expects A + C + M.
        data_dict["totalCount"] = sum(data_dict.values())

    except Exception:
        except_name = get_exception_info()
        result = {"result": 0, "errorMessage": except_name}
    else:
        search_list = [data["_source"] for data in search_data["hits"]["hits"]]
        data_dict["searchList"] = search_list
        result = {"result": 1, "errorMessage": "", "data": data_dict}

    return result
diff --git a/API-SERVICE/ServiceApiList/meta/getBizMetaData.py b/API-SERVICE/ServiceApiList/meta/getBizMetaData.py
new file mode 100644
index 00000000..30586ef5
--- /dev/null
+++ b/API-SERVICE/ServiceApiList/meta/getBizMetaData.py
@@ -0,0 +1,77 @@
+from copy import deepcopy
+from typing import Dict
+from datetime import datetime
+from ELKSearch.Manager.manager import ElasticSearchManager
+from ELKSearch.Utils.model import InputModel
+from ELKSearch.Utils.elasticsearch_utils import make_query, base_search_query
+from ELKSearch.Utils.database_utils import get_config
+from ServiceUtils.CommonUtil import get_exception_info
+from ApiService.ApiServiceConfig import config
+
+
def api(input: InputModel) -> Dict:
    """Search the business-meta Elasticsearch index and return paged hits plus tab counts.

    Same shape as getBizMetaAsset, but the primary search runs against the
    "kt_biz_data" (meta) index.
    """
    index = "kt_biz_data"
    # Section name: config.db_type looks like "test_db"; [:-3] strips "_db".
    els_config = get_config(config.root_path, "config.ini")[config.db_type[:-3]]
    # Caller pages are 1-based; Elasticsearch pages are 0-based.
    from_ = input.from_ - 1
    data_dict = dict()

    try:
        # Append searched keywords to the daily search log.
        if input.chk and len(input.searchOption):
            with open(f"{config.root_path}/log/{config.category}/{datetime.today().strftime('%Y%m%d')}_search.log","a") as fp:
                for search in input.searchOption:
                    fp.write(f"{str(search.keywords)}\n")

        es = ElasticSearchManager(page=from_, size=input.size,
                                  index=index, **els_config)
        es.set_sort(input.sortOption)  # NOTE(review): overwritten by set_sort(sort_list) below

        # Build the "must" clause; free-text fields use the Korean analyzer.
        action = "query"
        sub_action = "must"
        for item in input.searchOption:
            tmp = []
            for field in item.field:
                if field in ["data_nm", "data_desc"]:
                    col = field + ".korean_analyzer"
                else:
                    col = field
                tmp.append(col)
            item.field = tmp
        query_dict = base_search_query(action, sub_action, input.searchOption)

        sub_action = "filter"
        item_dict = base_search_query(action, sub_action, input.filterOption)
        query_dict.update(item_dict)
        search_query = make_query(action, "bool", query_dict)
        es.body.update(search_query)

        sort_list = [{item.field: item.order} for item in input.sortOption]
        es.set_sort(sort_list)
        search_data = es.search(input.resultField)

        # "A": the same query counted against the assets index. The count API
        # rejects "sort", so drop it first.
        body = deepcopy(es.body)
        del body["sort"]
        data_dict["A"] = es.conn.count(index="kt_biz_asset", body=body)["count"]

        # "C": assets additionally flagged as registered content.
        data_type = make_query(
            "match", "conts_dataset_reg_yn", {"operator": "OR", "query": "Y"}
        )
        body["query"]["bool"]["filter"].append(data_type)
        data_dict["C"] = es.conn.count(index="kt_biz_asset", body=body)["count"]

        # "M": the original query counted against the meta index.
        body = deepcopy(es.body)
        del body["sort"]
        data_dict["M"] = es.conn.count(index="kt_biz_data", body=body)["count"]

        # NOTE(review): C is a subset of A, so this total double-counts —
        # confirm the front end expects A + C + M.
        data_dict["totalCount"] = sum(data_dict.values())

    except Exception:
        except_name = get_exception_info()
        result = {"result": 0, "errorMessage": except_name}
    else:
        search_list = [data["_source"] for data in search_data["hits"]["hits"]]
        data_dict["searchList"] = search_list
        result = {"result": 1, "errorMessage": "", "data": data_dict}

    return result
diff --git a/API-SERVICE/ServiceApiList/meta/getElsBizMetaList.py b/API-SERVICE/ServiceApiList/meta/getElsBizMetaList.py
new file mode 100644
index 00000000..a18d488c
--- /dev/null
+++ b/API-SERVICE/ServiceApiList/meta/getElsBizMetaList.py
@@ -0,0 +1,75 @@
+from typing import Dict
+from ELKSearch.Manager.manager import ElasticSearchManager
+from ELKSearch.Utils.model import InputModel
+from ELKSearch.Utils.elasticsearch_utils import make_query, base_search_query
+from ELKSearch.Utils.database_utils import get_config
+from ServiceUtils.CommonUtil import get_exception_info
+from ApiService.ApiServiceConfig import config
+
+
def api(input: InputModel) -> Dict:
    """Unified search with per-tab counts driven by the data_type field.

    After fetching one page of hits, the same bool query is re-counted once
    per data_type value (C/A/M and T = all types) by rewriting — or, for
    "T", removing — the data_type filter clause.
    """
    els_config = get_config(config.root_path, "config.ini")["kt"]
    data_dict = dict()
    # Caller pages are 1-based; Elasticsearch pages are 0-based.
    from_ = input.from_ - 1
    data_type = {
        # search_keyword: (result_key, result_data)
        "C": "contentsCount",
        "A": "assetsCount",
        "M": "metaCount",
        "T": "totalCount",
    }
    try:
        es = ElasticSearchManager(page=from_, size=input.size, **els_config)
        es.set_sort(input.sortOption)  # NOTE(review): overwritten by set_sort(sort_list) below

        # Free-text fields are searched via the Korean analyzer sub-field.
        action = "query"
        sub_action = "must"
        for item in input.searchOption:
            if item.field in ["data_nm", "data_desc"]:
                item.field = item.field + ".korean_analyzer"
        query_dict = base_search_query(action, sub_action, input.searchOption)

        sub_action = "filter"
        item_dict = base_search_query(action, sub_action, input.filterOption)
        query_dict.update(item_dict)
        search_query = make_query(action, "bool", query_dict)
        es.body.update(search_query)

        sort_list = [{item.field: item.order} for item in input.sortOption]
        es.set_sort(sort_list)
        search_data = es.search(input.resultField)

        # Locate an existing data_type filter clause so it can be rewritten
        # per tab below. NOTE(review): assumes every filter entry is a
        # "match" query — confirm base_search_query guarantees this.
        i = None
        for j, item in enumerate(item_dict["filter"]):
            if "data_type" in item["match"].keys():
                i = j
                break
        else:
            i = None  # caller supplied no data_type filter

        for key_nm, eng_nm in data_type.items():
            if i is None:
                # No existing clause: append one at the end and track it as [-1].
                cnt_query = make_query(
                    "match", "data_type", {"operator": "OR", "query": key_nm}
                )
                item_dict["filter"].append(cnt_query)
                i = -1
            else:
                # Reuse the clause in place, swapping only the matched value.
                item_dict["filter"][i]["match"]["data_type"]["query"] = key_nm

            # "T" (total) counts across every data_type: drop the clause.
            if key_nm == "T":
                del item_dict["filter"][i]

            query_dict.update(item_dict)
            cnt_query = make_query("query", "bool", query_dict)
            cnt = es.conn.count(index=es.index, body=cnt_query)["count"]
            data_dict[eng_nm] = cnt
    except Exception:
        except_name = get_exception_info()
        result = {"result": 0, "errorMessage": except_name}
    else:
        search_list = [data["_source"] for data in search_data["hits"]["hits"]]
        data_dict["searchList"] = search_list
        result = {"result": 1, "errorMessage": "", "data": data_dict}

    return result
diff --git a/API-SERVICE/ServiceApiList/meta/getPrefixBizMeta.py b/API-SERVICE/ServiceApiList/meta/getPrefixBizMeta.py
new file mode 100644
index 00000000..a8b47e62
--- /dev/null
+++ b/API-SERVICE/ServiceApiList/meta/getPrefixBizMeta.py
@@ -0,0 +1,39 @@
+from typing import Dict
+from pydantic import BaseModel
+from ELKSearch.Manager.manager import ElasticSearchManager
+from ServiceUtils.CommonUtil import get_exception_info
+from ELKSearch.Utils.database_utils import get_config
+from ApiService.ApiServiceConfig import config
+
+
class Prefix(BaseModel):
    """Autocomplete request body: keyword prefix plus maximum result count."""

    size: int  # max number of suggestions to return
    keyword: str  # prefix matched against data_nm
+
+
def api(input: Prefix) -> Dict:
    """
    Auto Complete data_nm
    Similar to a SQL LIKE search: returns up to *size* data_nm values from
    the meta and asset indices that start with *keyword*.
    """
    target_field = "data_nm"
    indices = ["kt_biz_data", "kt_biz_asset"]
    conn_cfg = get_config(config.root_path, "config.ini")[config.db_type[:-3]]
    prefix_query = {target_field: input.keyword}

    try:
        manager = ElasticSearchManager(index=indices, **conn_cfg)
        manager.size = input.size
        hits = manager.prefix(prefix_query, [target_field])
    except Exception:
        return {"result": 0, "errorMessage": get_exception_info()}

    names = [hit["_source"]["data_nm"] for hit in hits["hits"]["hits"]]
    return {"result": 1, "errorMessage": "", "data": names}
diff --git a/API-SERVICE/ServiceConnectManager/PostgresManager.py b/API-SERVICE/ServiceConnectManager/PostgresManager.py
new file mode 100644
index 00000000..38e1d593
--- /dev/null
+++ b/API-SERVICE/ServiceConnectManager/PostgresManager.py
@@ -0,0 +1,52 @@
+import psycopg2
+from typing import List, Dict, Tuple, Any
+from ApiService.ApiServiceConfig import config
+from fastapi.logger import logger
+
+
class PostgresManager:
    """Thin wrapper around a pooled psycopg2 connection.

    Borrows a connection from config.conn_pool on creation and returns it to
    the pool when the instance is garbage-collected.
    """

    def __init__(self) -> None:
        self.conn = self.connect()
        self.cursor = self.conn.cursor()

    def connect(self):
        """Borrow a connection from the shared pool."""
        conn = config.conn_pool.getconn()

        logger.info("PostgresManager Connect.")
        return conn

    def execute(self, sql: str) -> None:
        """Run a single statement and commit immediately."""
        self.cursor.execute(sql)
        self.conn.commit()
        logger.info(f"PostgresManager Execute Result. ({sql})")

    def multiple_excute(self, sql_list: list) -> None:
        """Run several statements as one transaction; roll back all on failure.

        FIX: commit once after the loop (the old per-statement commit made the
        rollback meaningless) and chain the raised DatabaseError to the
        original exception instead of raising the bare class.
        (Misspelled name kept for existing callers; prefer multiple_execute.)
        """
        try:
            for index, sql in enumerate(sql_list):
                logger.info(f"PostgresManager Multiple Execute. ({index}. {sql})")
                self.cursor.execute(sql)
            self.conn.commit()
        except (Exception, psycopg2.DatabaseError) as err:
            self.conn.rollback()
            raise psycopg2.DatabaseError(str(err)) from err

    # Correctly-spelled alias; existing callers of multiple_excute keep working.
    multiple_execute = multiple_excute

    def select(self, sql: str, count: int = None) -> Tuple[List[Dict[Any, Any]], List[Any]]:
        """Run a query and return (rows as dicts, column names).

        count, when given, limits the fetch via fetchmany().
        """
        self.execute(sql)
        column_names = [desc[0] for desc in self.cursor.description]
        if count is None:
            rows = self.cursor.fetchall()
        else:
            rows = self.cursor.fetchmany(count)

        result = [dict(zip(column_names, row)) for row in rows]
        return result, column_names

    def commit(self):
        self.conn.commit()

    def __del__(self) -> None:
        # FIX: guard cleanup — __init__ may have failed before cursor/conn
        # existed, and __del__ must never raise.
        try:
            if hasattr(self, "cursor"):
                self.cursor.close()
            if hasattr(self, "conn"):
                config.conn_pool.putconn(self.conn)
        except Exception:
            pass
diff --git a/API-SERVICE/ServiceConnectManager/__init__.py b/API-SERVICE/ServiceConnectManager/__init__.py
new file mode 100644
index 00000000..b5c0391e
--- /dev/null
+++ b/API-SERVICE/ServiceConnectManager/__init__.py
@@ -0,0 +1 @@
+from .PostgresManager import *
diff --git a/API-SERVICE/ServiceUtils/CommonUtil.py b/API-SERVICE/ServiceUtils/CommonUtil.py
new file mode 100644
index 00000000..c1434e1a
--- /dev/null
+++ b/API-SERVICE/ServiceUtils/CommonUtil.py
@@ -0,0 +1,264 @@
+from datetime import datetime, timedelta
+from ServiceUtils.exceptions import TokenDoesNotExist, InvalidUserInfo
+from pytz import timezone
+import configparser
+import argparse
+import traceback
+from fastapi.logger import logger
+from fastapi.requests import Request
+from typing import Any, Optional, Dict
+from ApiService.ApiServiceConfig import config
+from ServiceConnectManager import PostgresManager
+from psycopg2 import pool
+import sys
+from jose import jwt
+import traceback
+import logging
+from ServiceUtils.crypto import AESCipher
+
+lamp = logging.getLogger("trace")
+
+
def knime_encrypt(data: str):
    """Encrypt *data* with the configured KNIME secret; returns base64 text."""
    cipher = AESCipher(config.secret_info["knime_secret_key"])
    return cipher.encrypt(data).decode()
+
+
def knime_decrpyt(data: str):
    """Decrypt a token produced by knime_encrypt; returns the plaintext str.

    NOTE(review): name typo ("decrpyt") kept — callers elsewhere use it.
    """
    cipher = AESCipher(config.secret_info["knime_secret_key"])
    return cipher.decrypt(data).decode()
+
+
def get_token_from_cookie(request: Request):
    """Return the portal access token cookie, or raise TokenDoesNotExist."""
    token = request.cookies.get(config.secret_info["cookie_name"])
    if token:
        return token
    raise TokenDoesNotExist
+
+
def jwt_decode(token):
    """Decode and verify a portal JWT using the configured secret/algorithm."""
    secret = config.secret_info["secret_key"]
    algo = config.secret_info["algorithm"]
    return jwt.decode(token=token, key=secret, algorithms=algo)
+
+
def get_user_info(payload):
    """Resolve a token payload to its user_bas row.

    Raises InvalidUserInfo when no matching user exists.
    """
    username = payload[config.user_info["id_column"]]
    rows = get_user(username)
    if not rows[0]:
        raise InvalidUserInfo
    return rows[0][0]
+
+
def convert_data(data) -> str:
    """Render a Python value as a SQL literal fragment.

    Rules:
    - "now()" / "NOW()" pass through unquoted (SQL function call).
    - A leading backtick marks pre-rendered SQL: returned verbatim minus the tick.
    - Anything else is stripped, single-quote-escaped, and quoted.

    FIX: the old version implicitly returned None for an empty string (which
    broke the surrounding f-string SQL) and did not escape embedded quotes.
    """
    data = str(data)
    if data in ("now()", "NOW()"):
        return data
    if data.startswith("`"):
        return data[1:]
    # Escape embedded single quotes so values like O'Brien stay valid SQL.
    escaped = data.strip().replace("'", "''")
    return f"'{escaped}'"
+
+
def set_log_path():
    """Rewrite logging.conf so the rotating file handler logs under this deployment's root."""
    conf_file = f"{config.root_path}/conf/{config.category}/logging.conf"
    parser = configparser.ConfigParser()
    parser.read(conf_file, encoding="utf-8")

    log_file = f"{config.root_path}/log/{config.category}/{config.category}.log"
    parser.set(
        "handler_rotatingFileHandler",
        "args",
        f"('{log_file}', 'a', 20000000, 10)",
    )

    with open(conf_file, "w") as handle:
        parser.write(handle)
+
+
def get_config(config_name: str):
    """Parse conf/<category>/<config_name> into a {section: {option: value}} dict."""
    path = config.root_path + f"/conf/{config.category}/{config_name}"
    parser = configparser.ConfigParser()
    parser.read(path, encoding="utf-8")
    return {
        section: {opt: parser.get(section, opt) for opt in parser.options(section)}
        for section in parser.sections()
    }
+
+
def parser_params() -> Any:
    """Parse the service CLI flags: --host, --port, --category, --db_type."""
    parser = argparse.ArgumentParser()
    for flag, kwargs in (
        ("--host", {"type": str, "default": "127.0.0.1"}),
        ("--port", {"type": int, "default": 19000}),
        ("--category", {"default": "meta"}),
        ("--db_type", {"default": "test"}),
    ):
        parser.add_argument(flag, **kwargs)
    return parser.parse_args()
+
+
def prepare_config(root_path) -> None:
    """Populate the global service config from CLI args and the conf/ ini files.

    Must run before any DB access: it also builds the shared connection pool.
    """
    args = parser_params()
    config.root_path = root_path
    config.category = args.category
    api_router_cfg = get_config("config.ini")          # DB / lamp / auth settings
    config.api_config = get_config("api_config.ini")   # per-endpoint routing table
    config.server_host = args.host
    config.server_port = args.port
    config.db_type = f"{args.db_type}_db"              # e.g. "test" -> "test_db" ini section
    config.db_info = api_router_cfg[config.db_type]
    config.lamp_info = api_router_cfg["lamp_info"]
    config.conn_pool = make_connection_pool(config.db_info)
    # Only the "common" (auth) service carries secret/user/ldap sections.
    if config.category == "common":
        config.secret_info = api_router_cfg["secret_info"]
        config.user_info = api_router_cfg["user_info"]
        config.ldap_info = api_router_cfg["ldap_info"]
+
+
def make_connection_pool(db_info):
    """Create a psycopg2 SimpleConnectionPool (1-20 conns) pinned to the configured search_path."""
    pool_kwargs = dict(
        user=db_info["user"],
        password=db_info["password"],
        host=db_info["host"],
        port=db_info["port"],
        database=db_info["database"],
        options=f'-c search_path={db_info["schema"]}',
        connect_timeout=10,
    )
    return pool.SimpleConnectionPool(1, 20, **pool_kwargs)
+
+
def connect_db():
    """Borrow a pooled Postgres connection wrapped in a PostgresManager."""
    return PostgresManager()
+
+
def save_file_for_reload():
    """Touch this module by appending a single space.

    Presumably this triggers a file-watching auto-reloader (e.g. uvicorn
    --reload) to restart the service — TODO confirm with the deployment setup.
    """
    with open(__file__, "a") as fd:
        fd.write(" ")
+
+
def make_res_msg(result, err_msg, data=None, column_names=None, kor_column_names=None):
    """Build the standard API response envelope.

    Without data/column_names returns {"result", "errorMessage"}; with both it
    adds data.body (rows) and data.header (one entry per column, optionally
    carrying the Korean column name).

    FIX: the header loop used to run before the None check, so calling this
    with only (result, err_msg) crashed with TypeError on enumerate(None).
    """
    if data is None or column_names is None:
        return {"result": result, "errorMessage": err_msg}

    header_list = []
    for index, column_name in enumerate(column_names):
        header = {"column_name": column_name}
        if kor_column_names:
            header["kor_column_name"] = kor_column_names[index]
        header_list.append(header)

    return {
        "result": result,
        "errorMessage": err_msg,
        "data": {"body": data, "header": header_list},
    }
+
+
def get_exception_info():
    """Log the active exception (type, message, traceback) and return its class name."""
    exc_type, exc_value, exc_tb = sys.exc_info()
    frames = traceback.extract_tb(exc_tb)
    stack_text = "\n".join(str(frame) for frame in frames)
    logger.error(
        f"\n- Exception Type : {exc_type}\n- Exception Message : {str(exc_value).strip()}\n- Exception Log : \n{stack_text}"
    )
    return exc_type.__name__
+
+
def convert_error_message(exception_name: str):
    """Map known DB exception names to portal error codes; pass others through."""
    mapping = {"UniqueViolation": "UNIQUE_VIOLATION"}
    return mapping.get(exception_name, exception_name)
+
+
+##### for user info #####
class IncorrectUserName(Exception):
    """Raised when a supplied user name matches no known account."""

    pass
+
+
class IncorrectPassword(Exception):
    """Raised when a login password fails verification."""

    pass
+
+
def get_user(user_name: str, user_type: str = None):
    """Fetch user rows matching *user_name* (and optionally *user_type*).

    Returns the (rows, column_names) tuple produced by PostgresManager.select.

    FIX: user_type used to be interpolated raw into the SQL string (injection
    vector); it now goes through convert_data like user_name.
    """
    db = connect_db()
    query = (
        f'SELECT * FROM {config.user_info["table"]} WHERE {config.user_info["id_column"]} = {convert_data(user_name)}'
    )
    if user_type:
        query += f" and user_type = {convert_data(user_type)}"
    user = db.select(query)
    return user
+
+
def create_token(data: dict, secret_key, algorithm, expires_delta: Optional[timedelta] = None):
    """Return a signed JWT for *data* with an ``exp`` claim in Asia/Seoul time.

    Falls back to a 15-minute lifetime when expires_delta is not given.
    """
    payload = data.copy()
    lifetime = expires_delta if expires_delta else timedelta(minutes=15)
    expire = datetime.now(timezone("Asia/Seoul")) + lifetime

    logger.info(f"commonToken Expire : {expire}")
    payload.update({"exp": expire})

    return jwt.encode(payload, secret_key, algorithm=algorithm)
+
+
def make_token_data(user: Dict) -> Dict:
    """Project the configured token columns out of a user row.

    datetime values are rendered as "%Y-%m-%d %H:%M:%S.%f" strings so the
    payload is JSON-serializable.
    """
    token_data = {}
    for column in config.secret_info["token_data_column"].split(","):
        value = user[column]
        if isinstance(value, datetime):
            value = datetime.strftime(value, "%Y-%m-%d %H:%M:%S.%f")
        token_data[column] = value
    logger.info(token_data)
    return token_data
+
+
def kt_lamp(
    log_type: str,
    transaction_id: str,
    operation: str,
    res_type: str = "I",
    res_code: str = "",
    res_desc: str = "",
):
    """Emit one KT-LAMP trace record on the "trace" logger.

    OUT_REQ records carry a destination section; OUT_RES/IN_RES records carry
    a response section built from the res_* arguments.
    """
    now = datetime.now()
    record = {
        "timestamp": now.strftime("%Y-%m-%d %H:%M:%S.%f")[:-3],
        "service": config.lamp_info["service_code"],
        "operation": f'{config.lamp_info["prefix"]}_{operation}',
        "transactionId": transaction_id,
        "logType": log_type,
        "host": {
            "name": config.lamp_info["host_name"],
            "ip": config.lamp_info["host_ip"],
        },
    }

    if log_type == "OUT_REQ":
        record["destination"] = {
            "name": config.lamp_info["dest_name"],
            "ip": config.lamp_info["dest_ip"],
        }
    elif log_type in ("OUT_RES", "IN_RES"):
        record["response"] = {
            "type": res_type,
            "code": res_code,
            "desc": res_desc,
        }

    lamp.info(record)
diff --git a/API-SERVICE/ServiceUtils/__init__.py b/API-SERVICE/ServiceUtils/__init__.py
new file mode 100644
index 00000000..3b5ce1cc
--- /dev/null
+++ b/API-SERVICE/ServiceUtils/__init__.py
@@ -0,0 +1 @@
+from .CommonUtil import *
diff --git a/API-SERVICE/ServiceUtils/crypto.py b/API-SERVICE/ServiceUtils/crypto.py
new file mode 100644
index 00000000..0ea078a1
--- /dev/null
+++ b/API-SERVICE/ServiceUtils/crypto.py
@@ -0,0 +1,22 @@
+from hashlib import md5
+from base64 import b64decode
+from base64 import b64encode
+
+from Crypto.Cipher import AES
+from Crypto.Random import get_random_bytes
+from Crypto.Util.Padding import pad, unpad
+
+
class AESCipher:
    """AES-CBC helper: key = MD5(passphrase), random IV prepended, base64 I/O.

    NOTE(review): MD5 key derivation is weak; kept for compatibility with
    already-issued tokens — do not change without re-issuing ciphertexts.
    """

    def __init__(self, key):
        # Derive a fixed 16-byte AES key from the passphrase.
        self.key = md5(key.encode("utf8")).digest()

    def encrypt(self, data):
        """Return base64(iv + AES-CBC(pad(utf-8 data))) as bytes."""
        iv = get_random_bytes(AES.block_size)
        # FIX: use a local cipher object — CBC cipher instances are single-use,
        # and stashing them on self made instances stateful and not thread-safe.
        cipher = AES.new(self.key, AES.MODE_CBC, iv)
        return b64encode(iv + cipher.encrypt(pad(data.encode("utf-8"), AES.block_size)))

    def decrypt(self, data):
        """Invert encrypt(): strip the IV, decrypt, unpad; returns bytes."""
        raw = b64decode(data)
        cipher = AES.new(self.key, AES.MODE_CBC, raw[: AES.block_size])
        return unpad(cipher.decrypt(raw[AES.block_size :]), AES.block_size)
diff --git a/API-SERVICE/ServiceUtils/exceptions.py b/API-SERVICE/ServiceUtils/exceptions.py
new file mode 100644
index 00000000..98fb70e5
--- /dev/null
+++ b/API-SERVICE/ServiceUtils/exceptions.py
@@ -0,0 +1,6 @@
class InvalidUserInfo(Exception):
    """Raised when a token payload does not resolve to a known user."""

    pass
+
+
class TokenDoesNotExist(Exception):
    """Raised when the expected auth cookie is missing from a request."""

    pass
diff --git a/API-SERVICE/build.sh b/API-SERVICE/build.sh
new file mode 100755
index 00000000..64a3a3ef
--- /dev/null
+++ b/API-SERVICE/build.sh
@@ -0,0 +1,8 @@
# Remove previous build artifacts and uninstall any installed copy.
rm -rf build dist mobigen_service.egg-info
pip uninstall mobigen_service -y

# Build the wheel for the current version.
python setup.py bdist_wheel

# Install the freshly built wheel locally (version must match setup.py).
pip install ./dist/mobigen_service-0.5-py3-none-any.whl
+
diff --git a/API-SERVICE/conf/common/api_config.ini b/API-SERVICE/conf/common/api_config.ini
new file mode 100644
index 00000000..e3f0714a
--- /dev/null
+++ b/API-SERVICE/conf/common/api_config.ini
@@ -0,0 +1,52 @@
+[commonSelect]
+method = POST
+url = /portal/api/common/commonSelect
+sub_dir = common
+
+[commonExecute]
+method = POST
+url = /portal/api/common/commonExecute
+sub_dir = common
+
+[userLogin]
+method = POST
+url = /portal/api/common/user/userLogin
+sub_dir = common
+
+[commonLogout]
+method = POST
+url = /portal/api/common/user/commonLogout
+sub_dir = common
+
+[commonToken]
+method = GET
+url = /portal/api/common/user/commonToken
+sub_dir = common
+
+[commonUserInfo]
+method = GET
+url = /portal/api/common/user/commonUserInfo
+sub_dir = common
+
+[login]
+method = POST
+url = /portal/api/common/login
+sub_dir = common.ldap
+
+[info]
+method = GET
+url = /portal/api/common/info
+sub_dir = common.ldap
+
+[createOTP]
+method = POST
+url = /portal/api/common/otp
+sub_dir = common.ldap
+
+[checkOTP]
+method = GET
+url = /portal/api/common/otp
+sub_dir = common.ldap
+
+
+
diff --git a/API-SERVICE/conf/common/config.ini b/API-SERVICE/conf/common/config.ini
new file mode 100644
index 00000000..6f841e4e
--- /dev/null
+++ b/API-SERVICE/conf/common/config.ini
@@ -0,0 +1,42 @@
+[test_db]
+host = 192.168.100.126
+port = 25432
+user = dpmanager
+password = hello.dp12#$
+database = ktportal
+schema = users,meta,sitemng,board,analysis,sysconfig
+
+[commercial_db]
+host = 10.220.184.63
+port = 5432
+user = dpmanager
+password = 22DOCean.@!~
+database = ktportal
+schema = users,meta,sitemng,board,analysis,sysconfig
+
+[secret_info]
+secret_key = 09d25e094faa6ca2556c818166b7a9563b93f7099f6f0f4caa6cf63b88e8d3e7
+algorithm = HS256
+expire_min = 30
+cookie_name = user-docean-access-token
+token_data_column = user_id,emp_id,cmpno,user_nm,email,dept_nm,innt_aut_group_cd,user_type,tmp_aut_group_cd,tmp_aut_alc_user,tmp_aut_alc_date,tmp_aut_exp_date
+knime_cookie_name = knime_auth_token
+knime_secret_key = docean_knime_auth_256
+
+[user_info]
+table = user_bas
+id_column = emp_id
+
+[lamp_info]
+service_code = PG016701
+prefix = service
+host_name = portal
+host_ip = 192.168.100.126
+dest_name = portal
+dest_ip = 192.168.100.126
+
+[ldap_info]
+host = ...
+port = 22
+user = ...
+password = ...
diff --git a/API-SERVICE/conf/common/logging.conf b/API-SERVICE/conf/common/logging.conf
new file mode 100644
index 00000000..77fe8d64
--- /dev/null
+++ b/API-SERVICE/conf/common/logging.conf
@@ -0,0 +1,43 @@
+[loggers]
+keys = root,trace
+
+[logger_root]
+level = INFO
+handlers = console,rotatingFileHandler
+
+[logger_trace]
+level = INFO
+qualname = trace
+handlers = traceHandler
+propagate = 0
+
+[formatters]
+keys = default,trace
+
+[formatter_default]
+format = %(asctime)s %(levelname)s [%(filename)s:%(lineno)d] - %(message)s
+
+[formatter_trace]
+format = %(message)s
+
+[handlers]
+keys = console,rotatingFileHandler,traceHandler
+
+[handler_console]
+class = StreamHandler
+args = (sys.stdout,)
+formatter = default
+level = INFO
+
+[handler_rotatingFileHandler]
+class = handlers.RotatingFileHandler
+formatter = default
+args = ('/Users/swyang/Desktop/workspace/API_ROUTER_KT/API-SERVICE/log/common/common.log', 'a', 20000000, 10)
+level = INFO
+
+[handler_traceHandler]
+class = handlers.RotatingFileHandler
+formatter = trace
+args = ('Common-Trace.log', 'a', 20000000, 10)
+level = INFO
+
diff --git a/API-SERVICE/conf/meta/api_config.ini b/API-SERVICE/conf/meta/api_config.ini
new file mode 100644
index 00000000..39792797
--- /dev/null
+++ b/API-SERVICE/conf/meta/api_config.ini
@@ -0,0 +1,24 @@
+[getBizMetaData]
+method = POST
+url = /portal/api/meta/getBizMetaData
+sub_dir = meta
+
+[getBizMetaContent]
+method = POST
+url = /portal/api/meta/getBizMetaContent
+sub_dir = meta
+
+[getBizMetaAsset]
+method = POST
+url = /portal/api/meta/getBizMetaAsset
+sub_dir = meta
+
+[getPrefixBizMeta]
+method = POST
+url = /portal/api/meta/getPrefixBizMeta
+sub_dir = meta
+
+[getElsBizMetaList]
+method = POST
+url = /portal/api/meta/getElsBizMetaList
+sub_dir = meta
diff --git a/API-SERVICE/conf/meta/config.ini b/API-SERVICE/conf/meta/config.ini
new file mode 100644
index 00000000..aae1468d
--- /dev/null
+++ b/API-SERVICE/conf/meta/config.ini
@@ -0,0 +1,23 @@
+[test_db]
+host = 192.168.100.126
+port = 25432
+user = dpme
+password = hello.meta12#$
+database = ktportal
+schema = meta
+
+[commercial_db]
+host = 10.220.184.63
+port = 5432
+user = dpme
+password = 22DOCean.@!~
+database = ktportal
+schema = meta
+
+[lamp_info]
+service_code = PG016701
+prefix = service
+host_name = host
+host_ip = 192.168.101.44
+dest_name = dest
+dest_ip = 192.168.100.126
diff --git a/API-SERVICE/conf/meta/logging.conf b/API-SERVICE/conf/meta/logging.conf
new file mode 100644
index 00000000..6229c99f
--- /dev/null
+++ b/API-SERVICE/conf/meta/logging.conf
@@ -0,0 +1,43 @@
+[loggers]
+keys = root,trace
+
+[logger_root]
+level = INFO
+handlers = console,rotatingFileHandler
+
+[logger_trace]
+level = INFO
+qualname = trace
+handlers = traceHandler
+propagate = 0
+
+[formatters]
+keys = default,trace
+
+[formatter_default]
+format = %(asctime)s %(levelname)s [%(filename)s:%(lineno)d] - %(message)s
+
+[formatter_trace]
+format = %(asctime)s %(message)s
+
+[handlers]
+keys = console,rotatingFileHandler,traceHandler
+
+[handler_console]
+class = StreamHandler
+args = (sys.stdout,)
+formatter = default
+level = INFO
+
+[handler_rotatingFileHandler]
+class = handlers.RotatingFileHandler
+formatter = default
+args = ('/Users/cbc/DEV/Mobigen/API_DataPortal/KT/AP_API_Router/API-SERVICE/log/meta/meta.log', 'a', 20000000, 10)
+level = INFO
+
+[handler_traceHandler]
+class = handlers.RotatingFileHandler
+formatter = trace
+args = ('Meta-Trace.log', 'a', 20000000, 10)
+level = INFO
+
diff --git a/API-SERVICE/data_insert.py b/API-SERVICE/data_insert.py
new file mode 100644
index 00000000..c4a92a6c
--- /dev/null
+++ b/API-SERVICE/data_insert.py
@@ -0,0 +1,71 @@
+import os
+import uuid
+import sqlalchemy
+import pandas as pd
+from pathlib import Path
+from sqlalchemy import create_engine
+from sqlalchemy.orm import sessionmaker, scoped_session
+from sqlalchemy.ext.declarative import declarative_base
+from ELKSearch.Utils.database_utils import prepare_config
+# root_path = str(Path(os.path.dirname(os.path.abspath(__file__))))
+# prepare_config(root_path)
+
+
def get_table(table_name, session):
    """Reflect *table_name* from the session's bound engine into a SQLAlchemy Table."""
    engine = session.get_bind()
    return sqlalchemy.Table(table_name, sqlalchemy.MetaData(), autoload=True, autoload_with=engine)
+
+
"""
d-ocean sample data 입력

[default 값]
p.key = 기타
datatype = 기본
src_sys = d-ocean (전부 대문자로 치환)
data_upd_cycle = M

[ID 값으로 변환]
kywrd = 텍스트로 그냥 입력
ctgry - meta_change_ctgry_dtl
prv_forml - biz_meta_fltr_bas
src_sys - biz_meta_fltr_bas

"""
# NOTE(review): DB credentials are hardcoded in source — move to config.ini.
insert_db = create_engine(f"postgresql://dpme:hello.meta12#$@192.168.100.126:25432/ktportal",
                          connect_args={'options': '-csearch_path=meta'})

# insert_db = create_engine(f"postgresql://postgres:0312@localhost:5432/ktportal",
#                           connect_args={'options': '-csearch_path=meta'})

sess = scoped_session(sessionmaker(autocommit=True, autoflush=False, bind=insert_db))
base = declarative_base()

# Load the sample spreadsheet shipped next to this script.
data = pd.DataFrame(pd.read_excel("./d-ocean_sample.xlsx"))
print(data.head())
print(data.columns)

####### sample data fix-up #######


####### insert data into the DB #######
table_name = "test"
table_name = table_name.lower()


# Postgres folds unquoted identifiers to lower case; normalize to match.
data.columns = [col.lower() for col in list(data.columns)]
table = get_table(table_name,sess)
columns = table.columns.keys()

data = data.replace('', None)
for col in list(data.columns):
    data[col] = data[col].astype(str)
    # Add any spreadsheet-only columns to the table as TEXT before loading.
    if col not in columns:
        sess.execute('ALTER TABLE {} ADD {} TEXT'.format(table_name, col))

# Drop spreadsheet bookkeeping columns and key every row with a fresh UUID.
del data["bm"]
del data["pkey"]
del data["datatype"]
data["biz_dataset_id"] = [uuid.uuid4() for i in range(0,len(data))]

with insert_db.connect() as conn:
    data.to_sql(table_name,con=conn,if_exists='replace', index=False, index_label=False)
diff --git a/API-SERVICE/els_update.py b/API-SERVICE/els_update.py
new file mode 100644
index 00000000..a1def5bf
--- /dev/null
+++ b/API-SERVICE/els_update.py
@@ -0,0 +1,51 @@
+import os
+from pathlib import Path
+from datetime import datetime
+from elasticsearch import helpers
+from ELKSearch.Utils.database_utils import prepare_config, connect_db, select, config
+
+root_path = str(Path(os.path.dirname(os.path.abspath(__file__))))
+prepare_config(root_path)
+
+
+def main():
+ today = datetime.today().date()
+ bulk_meta_item = list()
+ prepare_config(root_path)
+ es = config.es
+ db = connect_db()
+
+ if config.category == "data":
+ table_name = "vw_ifs_tbl_txn"
+ condition = f"WHERE DATE(tbl_first_cret_dt) > DATE('{today}')" \
+ f"OR DATE(tbl_last_chg_dt) >= DATE('{today}')"
+ else:
+ table_name = "vw_assets_biz_meta_bas"
+ condition = f"WHERE DATE(amd_date) > DATE('{today}')" \
+ f"OR DATE(reg_date) >= DATE('{today}')"
+
+ db_query = f"SELECT * FROM {table_name} "
+
+ if config.check == "True":
+ db_query = db_query + condition
+
+ meta_wrap_list = select(db, db_query)[0]
+
+ try:
+ for meta_wrap in meta_wrap_list:
+ els_dict = dict()
+ if config.category != "data":
+ if meta_wrap["upd_pam_date"]:
+ meta_wrap["upd_pam_date"] = datetime.strptime(meta_wrap["upd_pam_date"], '%Y-%m-%d')
+ els_dict["_id"] = meta_wrap["biz_dataset_id"]
+ els_dict["_source"] = meta_wrap
+ els_dict["_source"]["biz_dataset_id"] = meta_wrap["biz_dataset_id"]
+ # es.insert(meta_wrap,meta_wrap["biz_dataset_id"])
+ bulk_meta_item.append(els_dict)
+ helpers.bulk(es.conn, bulk_meta_item, index=es.index)
+ except Exception as e:
+ print(e)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/API-SERVICE/ranking_word.py b/API-SERVICE/ranking_word.py
new file mode 100644
index 00000000..f61cf4a6
--- /dev/null
+++ b/API-SERVICE/ranking_word.py
@@ -0,0 +1,45 @@
+import os
+import re
+import ast
+from pathlib import Path
+from datetime import datetime
+from collections import Counter
+from ELKSearch.Utils.database_utils import prepare_config, connect_db, select, config, execute
+root_path = str(Path(os.path.dirname(os.path.abspath(__file__))))
+
+
+# todo: 이재중 책임님께 ranking용 db table 추가 명세 작성후 요청
+def main():
+ """
+ param:
+ parameter는 els_update.py 에서 공통으로 사용
+ - db_type: conf/config.ini or ELKSearch/conf/db_config.ini
+ - check: type str, False or True, True=누적,False=갱신
+
+ """
+ today = datetime.today().date().strftime('%Y%m%d')
+ prepare_config(root_path)
+ db = connect_db()
+
+ # 검색어 로그 불러오기
+ search_file_name = f"{root_path}/log/meta/{today}_search.log"
+ with open(search_file_name,"r") as fp:
+ search_log_file = fp.read().split("\n")[:-1]
+
+ today_search_word = []
+ for words in search_log_file:
+ result = [word for word in ast.literal_eval(words)]
+ today_search_word = today_search_word + result
+
+ query = "INSERT INTO srhwd_find_tmscnt_sum VALUES "
+ values = ""
+ if len(today_search_word):
+ for word, cnt in Counter(today_search_word).items():
+ item = f"('{word}',{cnt},'{datetime.today().date()}'),"
+ values = values + item
+ query = query + values[:-1]
+ execute(db,db.cursor(), query)
+
+
+if __name__ == "__main__":
+ main()
\ No newline at end of file
diff --git a/API-SERVICE/safe_start.sh b/API-SERVICE/safe_start.sh
new file mode 100644
index 00000000..fe6650f3
--- /dev/null
+++ b/API-SERVICE/safe_start.sh
@@ -0,0 +1,66 @@
+#!/bin/bash
+# Safe restart for API-Service: stop any instance bound to host:port,
+# then start a fresh one. Args: host port category db_type (all optional).
+app_name=API-Service
+router_host=$1
+router_port=$2
+category=$3
+router_db=$4
+
+# Fill in defaults for any argument that was not supplied.
+input() {
+    if [[ $router_host == "" ]];then
+        router_host=192.168.100.126
+    fi
+    if [[ $router_port == "" ]];then
+        router_port=9014
+    fi
+    if [[ $category == "" ]];then
+        category=meta
+    fi
+    if [[ $router_db == "" ]];then
+        router_db=test
+    fi
+}
+
+# Kill the server.py process that matches this host and port.
+router_stop() {
+    app=$( ps -ef | grep python | grep server.py | grep ${router_host} | grep ${router_port} | awk '{print $2}' )
+    if [[ $app != "" ]];then
+        exit_app="kill -9 ${app}"
+        echo "Stop Command ( router ) : "${exit_app}
+        $exit_app
+    else
+        echo "Not Found application. ( router )"
+    fi
+}
+
+# Kill any leftover uvicorn worker still listening on host:port
+# (PIDs parsed from netstat; python entries are skipped — already handled above).
+uvicorn_stop() {
+    uvicorn=$( netstat -nlp | grep ${router_host}':'${router_port} | awk '{print $7}' | tr "/" "\n" )
+    if [[ $uvicorn != "" ]];then
+        for i in $uvicorn
+        do
+            if [[ ${i} == *python* ]];then
+                continue
+            fi
+            exit_uvicorn="kill -9 ${i}"
+            echo "Stop Command ( uvicorn ) : "${exit_uvicorn}
+            $exit_uvicorn
+        done
+    else
+        echo "Not Found application. ( uvicorn )"
+    fi
+}
+
+# Launch server.py detached, logging discarded (nohup, stdout/stderr to /dev/null).
+router_start() {
+    source_path="$( cd "$( dirname "$0" )" && pwd -P )"
+    router_exec="nohup python3.8 ${source_path}/server.py --host ${router_host} --port ${router_port} --category ${category} --db_type ${router_db} 1> /dev/null 2>&1 &"
+    echo "Start Command : ${router_exec}"
+    nohup python3.8 ${source_path}/server.py --host ${router_host} --port ${router_port} --category ${category} --db_type ${router_db} 1> /dev/null 2>&1 &
+}
+
+echo "########## Safe Start (${app_name}) ##########"
+echo "========== STOP ${app_name} =========="
+input
+
+router_stop
+sleep 2
+uvicorn_stop
+
+echo "========== START ${app_name} =========="
+router_start
diff --git a/API-SERVICE/send_email.py b/API-SERVICE/send_email.py
new file mode 100644
index 00000000..398d759a
--- /dev/null
+++ b/API-SERVICE/send_email.py
@@ -0,0 +1,58 @@
+import os
+import smtplib
+from pathlib import Path
+from email.mime.text import MIMEText
+from email.mime.multipart import MIMEMultipart
+from ELKSearch.Utils.database_utils import prepare_config, connect_db, select, execute, config
+
+root_path = str(Path(os.path.dirname(os.path.abspath(__file__))))
+prepare_config(root_path)
+
+
+def main():
+ """
+ :argument
+ category = email
+ db_type = email_db
+ """
+ # batch 1분에 한번씩 email을 전송하고 status를 req에서 send로 변경한다
+ query = "SELECT * FROM email_dsp_hst WHERE sttus = 'REQ'"
+ db = connect_db()
+ send_list = select(db, query)[0]
+
+ from_addr = config.els_info["from_addr"]
+ host = config.els_info["host"]
+ port = config.els_info["port"]
+
+ for email_info in send_list:
+ try:
+ message = MIMEMultipart("alternative")
+ message["Subject"] = email_info['title']
+ message["From"] = from_addr
+ message["To"] = email_info['rcv_adr']
+
+ with open(f'{config.root_path}/ELKSearch/conf/template/exTemplate.html', "r") as fd:
+ html = "\n".join(fd.readlines())
+
+ html = html.replace("TITLE", email_info['title'])
+ html = html.replace("SBST", email_info['sbst'])
+ html_part = MIMEText(html, "html")
+ message.attach(html_part)
+
+ with smtplib.SMTP(host, port) as smtp:
+ # smtp.sendmail(from_addr,email_info['rcv_adr'],message)
+ smtp.send_message(message)
+ except Exception as e:
+ print(0)
+ print(e)
+ else:
+ # update status
+ print(1)
+ query = f"UPDATE email_dsp_hst SET sttus = 'SEND'" \
+ f"WHERE email_id = '{email_info['email_id']}'"
+ execute(db,db.cursor(),query)
+ break
+
+
+if __name__ == "__main__":
+ main()
diff --git a/API-SERVICE/server.py b/API-SERVICE/server.py
new file mode 100644
index 00000000..db89ea16
--- /dev/null
+++ b/API-SERVICE/server.py
@@ -0,0 +1,31 @@
+from fastapi import FastAPI
+import uvicorn
+from pathlib import Path
+from ApiService.ApiServiceConfig import config
+from ServiceUtils.CommonUtil import prepare_config, set_log_path
+from ApiService import ApiService
+import os
+
+root_path = str(Path(os.path.dirname(os.path.abspath(__file__))))
+prepare_config(root_path)
+api_router = ApiService()
+app = FastAPI()
+app.include_router(api_router.router)
+
+
+if __name__ == "__main__":
+ log_dir = f"{config.root_path}/log/{config.category}"
+ if os.path.isdir(log_dir):
+ print(f"Directory Exists")
+ else:
+ print(f"Make log dir : {log_dir}")
+ os.makedirs(log_dir)
+
+ set_log_path()
+ uvicorn.run(
+ "server:app",
+ host=config.server_host,
+ port=config.server_port,
+ reload=True,
+ log_config=f"{config.root_path}/conf/{config.category}/logging.conf",
+ )
diff --git a/API-SERVICE/setup.py b/API-SERVICE/setup.py
new file mode 100644
index 00000000..5f37cfa8
--- /dev/null
+++ b/API-SERVICE/setup.py
@@ -0,0 +1,10 @@
+from setuptools import setup, find_packages
+
+setup(
+ name="mobigen_service",
+ version="0.5",
+ author="mobigen",
+ author_email="cbccbs@mobigen.co.kr",
+ python_requires=">=3.6",
+ packages=find_packages(exclude=["docs", "tests*", "__pycache__/"]),
+)
diff --git a/API-SERVICE/start.sh b/API-SERVICE/start.sh
new file mode 100644
index 00000000..484bfd8f
--- /dev/null
+++ b/API-SERVICE/start.sh
@@ -0,0 +1,33 @@
+#!/bin/bash
+# Start API-Service. Args: host port category db_type (all optional).
+app_name=API-Service
+router_host=$1
+router_port=$2
+category=$3
+router_db=$4
+
+# Fill in defaults for any argument that was not supplied.
+input() {
+    if [[ $router_host == "" ]];then
+        router_host=192.168.100.126
+    fi
+    if [[ $router_port == "" ]];then
+        router_port=9014
+    fi
+    if [[ $category == "" ]];then
+        category=meta
+    fi
+    if [[ $router_db == "" ]];then
+        router_db=test
+    fi
+}
+
+# Launch server.py detached, logging discarded (nohup, stdout/stderr to /dev/null).
+router_start() {
+    source_path="$( cd "$( dirname "$0" )" && pwd -P )"
+    router_exec="nohup python3.8 ${source_path}/server.py --host ${router_host} --port ${router_port} --category ${category} --db_type ${router_db} 1> /dev/null 2>&1 &"
+    echo "Start Command : ${router_exec}"
+    nohup python3.8 ${source_path}/server.py --host ${router_host} --port ${router_port} --category ${category} --db_type ${router_db} 1> /dev/null 2>&1 &
+}
+
+echo "########## Start Application (${app_name}) ##########"
+echo "========== START ${app_name} =========="
+input
+
+router_start
diff --git a/API-SERVICE/stop.sh b/API-SERVICE/stop.sh
new file mode 100644
index 00000000..e4227863
--- /dev/null
+++ b/API-SERVICE/stop.sh
@@ -0,0 +1,47 @@
+#!/bin/bash
+# Stop API-Service bound to host:port. Args: host port (both optional).
+app_name=API-Service
+router_host=$1
+router_port=$2
+
+# Fill in defaults for any argument that was not supplied.
+input() {
+    if [[ $router_host == "" ]];then
+        router_host=192.168.100.126
+    fi
+    if [[ $router_port == "" ]];then
+        router_port=9014
+    fi
+}
+
+# Kill the server.py process that matches this host and port.
+router_stop() {
+    app=$( ps -ef | grep python | grep server.py | grep ${router_host} | grep ${router_port} | awk '{print $2}' )
+    if [[ $app != "" ]];then
+        exit_app="kill -9 ${app}"
+        echo "Stop Command ( router ) : "${exit_app}
+        $exit_app
+    else
+        echo "Not Found application. ( router )"
+    fi
+}
+
+# Kill any leftover uvicorn worker still listening on host:port
+# (PIDs parsed from netstat; python entries are skipped — already handled above).
+uvicorn_stop() {
+    uvicorn=$( netstat -nlp | grep ${router_host}':'${router_port} | awk '{print $7}' | tr "/" "\n" )
+    if [[ $uvicorn != "" ]];then
+        for i in $uvicorn
+        do
+            if [[ ${i} == *python* ]];then
+                continue
+            fi
+            exit_uvicorn="kill -9 ${i}"
+            echo "Stop Command ( uvicorn ) : "${exit_uvicorn}
+            $exit_uvicorn
+        done
+    else
+        echo "Not Found application. ( uvicorn )"
+    fi
+}
+
+echo "########## Stop Application (${app_name}) ##########"
+echo "========== STOP ${app_name} =========="
+input
+router_stop
+sleep 2
+uvicorn_stop
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 00000000..680e041b
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,12 @@
+fastapi==0.83.0
+uvicorn==0.16.0
+psycopg2-binary==2.9.3
+asyncssh==2.12.0
+aiohttp==3.8.3
+elasticsearch==7.17.6
+mobigen-router
+mobigen-service
+pydantic~=1.10.2
+starlette~=0.19.1
+setuptools~=58.1.0
+pytz~=2022.6
\ No newline at end of file