diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..8453c31c --- /dev/null +++ b/.gitignore @@ -0,0 +1,8 @@ +/venv/ +__pycache__/ +.DS_Store +.idea +*.log +.vscode +.python-* +.coverage \ No newline at end of file diff --git a/API-ROUTER/ApiList/service/TEST_1.py b/API-ROUTER/ApiList/service/TEST_1.py new file mode 100644 index 00000000..b90ecf46 --- /dev/null +++ b/API-ROUTER/ApiList/service/TEST_1.py @@ -0,0 +1,7 @@ +from typing import Dict +from ApiRoute.ApiRouteConfig import config + + +def api(api_name: str) -> Dict: + + return {"API_NAME": "TEST_1"} diff --git a/API-ROUTER/ApiList/service/TEST_2.py b/API-ROUTER/ApiList/service/TEST_2.py new file mode 100644 index 00000000..b90ecf46 --- /dev/null +++ b/API-ROUTER/ApiList/service/TEST_2.py @@ -0,0 +1,7 @@ +from typing import Dict +from ApiRoute.ApiRouteConfig import config + + +def api(api_name: str) -> Dict: + + return {"API_NAME": "TEST_2"} diff --git a/API-ROUTER/ApiRoute/ApiRoute.py b/API-ROUTER/ApiRoute/ApiRoute.py new file mode 100644 index 00000000..0668ac7b --- /dev/null +++ b/API-ROUTER/ApiRoute/ApiRoute.py @@ -0,0 +1,127 @@ +from fastapi.logger import logger +from typing import Dict, List +import importlib.util +from fastapi import APIRouter +from ApiRoute.ApiRouteConfig import config +from Utils.CommonUtil import ( + connect_db, + save_file_for_reload, + get_exception_info, + delete_headers, +) +from Utils.RouteUtil import ( + bypass_msg, + call_remote_func, + get_api_info, + make_route_response, +) +from pydantic import BaseModel +from starlette.requests import Request +from urllib import parse + + +class ApiServerInfo(BaseModel): + nm: str + ip_adr: str + domn_nm: str + + +class ApiParam(BaseModel): + api_nm: str + nm: str + data_type: str + deflt_val: str + + +class ApiInfo(BaseModel): + api_nm: str + ctgry: str + route_url: str + url: str + meth: str + cmd: str + mode: str + params: List[ApiParam] + + +class ApiRoute: + def __init__(self) -> None: + self.router = APIRouter() 
+ self.set_route() + + def set_route(self) -> None: + self.router.add_api_route( + "/api/reload", self.reload_api, methods=["GET"], tags=["API Info Reload"] + ) + self.router.add_api_route( + "/route/common/me", self.get_client_ip, methods=["GET"] + ) + + db = connect_db() + config.api_info, _ = db.select("SELECT * FROM tb_api_info;") + config.api_params, _ = db.select("SELECT * FROM tb_api_params;") + config.api_server_info, _ = db.select("SELECT * FROM tb_api_server_info") + + for api in config.api_info: + method = str(api["meth"]).split(",") + self.router.add_api_route( + api["route_url"], + self.route_api, + methods=method, + tags=[f'Route Category ({api["ctgry"]})'], + ) + + for api_name, conf_api_info in config.api_config.items(): + module_path = ( + f'{config.root_path}/ApiList/{conf_api_info["sub_dir"]}/{api_name}.py' + ) + module_name = "api" + spec = importlib.util.spec_from_file_location(module_name, module_path) + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + self.router.add_api_route( + f'{conf_api_info["url_prefix"]}/{conf_api_info["sub_dir"]}/{api_name}', + module.api, + methods=[conf_api_info["method"]], + tags=["service"], + ) + + def get_client_ip(self, request: Request): + return {"result": 1, "errorMessage": "", "data": request.scope["client"][0]} + + def reload_api(self): + logger.info("Reload API Info") + save_file_for_reload() + result = {"result": 1, "errorMessage": ""} + return result + + async def route_api(self, request: Request) -> Dict: + route_url = request.url.path + method = request.method + access_token = "" + body = None + headers = delete_headers( + dict(request.headers), ["content-length", "user-agent"] + ) + try: + api_info, api_params = get_api_info(route_url) + if method == "POST": + body = await request.json() + params_query = parse.unquote(str(request.query_params)) + + logger.info( + f"\n- api_info : {api_info}\n- api_params : {api_params} \ + \n- req body : {body}, params_query : 
{params_query}" + ) + + if api_info["mode"] == "MESSAGE PASSING": + result, access_token = await bypass_msg( + api_info, params_query, body, headers + ) + else: + result = await call_remote_func(api_info, api_params, body) + except Exception: + except_name = get_exception_info() + result = {"result": 0, "errorMessage": except_name} + + return make_route_response(result, api_info["api_nm"], access_token) diff --git a/API-ROUTER/ApiRoute/ApiRouteConfig.py b/API-ROUTER/ApiRoute/ApiRouteConfig.py new file mode 100644 index 00000000..92132ab6 --- /dev/null +++ b/API-ROUTER/ApiRoute/ApiRouteConfig.py @@ -0,0 +1,26 @@ +from typing import Dict, List +from psycopg2 import pool + + +class ApiRouteConfig: + root_path: str + + db_type: str + db_info: Dict + + remote_info: Dict + + server_host: str + server_port: int + + api_config: Dict + + api_server_info: List[Dict] + api_info: List[Dict] + api_params: List[Dict] + + secret_info: Dict + conn_pool: pool.SimpleConnectionPool + + +config = ApiRouteConfig diff --git a/API-ROUTER/ApiRoute/__init__.py b/API-ROUTER/ApiRoute/__init__.py new file mode 100644 index 00000000..c1dfef31 --- /dev/null +++ b/API-ROUTER/ApiRoute/__init__.py @@ -0,0 +1,2 @@ +from .ApiRouteConfig import * +from .ApiRoute import * diff --git a/API-ROUTER/ConnectManager/PostgresManager.py b/API-ROUTER/ConnectManager/PostgresManager.py new file mode 100644 index 00000000..514a6b25 --- /dev/null +++ b/API-ROUTER/ConnectManager/PostgresManager.py @@ -0,0 +1,57 @@ +import psycopg2 +from typing import List, Dict, Tuple, Any +from fastapi.logger import logger +from ApiRoute.ApiRouteConfig import config + + +class PostgresManager: + def __init__(self) -> None: + self.conn = self.connect() + self.cursor = self.conn.cursor() + + def connect(self): + conn = config.conn_pool.getconn() + + logger.info("PostgresManager Connect.") + return conn + + def execute(self, sql: str) -> None: + try: + self.cursor.execute(sql) + self.conn.commit() + except (Exception, 
psycopg2.DatabaseError): + self.conn.rollback() + raise psycopg2.DatabaseError + + def multiple_excute(self, sql_list: list) -> None: + try: + for index, sql in enumerate(sql_list): + logger.info(f"PostgresManager Multiple Execute. ({index}. {sql})") + self.cursor.execute(sql) + self.conn.commit() + except (Exception, psycopg2.DatabaseError): + self.conn.rollback() + raise psycopg2.DatabaseError + + def select( + self, sql: str, count: int = None + ) -> Tuple[List[Dict[Any, Any]], List[Any]]: + self.execute(sql) + column_names = [desc[0] for desc in self.cursor.description] + if count is None: + rows = self.cursor.fetchall() + else: + rows = self.cursor.fetchmany(count) + logger.info(f"PostgresManager Select Execute. ({sql})") + + result = [] + for row in rows: + result.append(dict(zip(column_names, row))) + return result, column_names + + def commit(self): + self.conn.commit() + + def __del__(self) -> None: + self.cursor.close() + config.conn_pool.putconn(self.conn) diff --git a/API-ROUTER/ConnectManager/__init__.py b/API-ROUTER/ConnectManager/__init__.py new file mode 100644 index 00000000..b5c0391e --- /dev/null +++ b/API-ROUTER/ConnectManager/__init__.py @@ -0,0 +1 @@ +from .PostgresManager import * diff --git a/API-ROUTER/Utils/CommonUtil.py b/API-ROUTER/Utils/CommonUtil.py new file mode 100644 index 00000000..94309475 --- /dev/null +++ b/API-ROUTER/Utils/CommonUtil.py @@ -0,0 +1,128 @@ +import os +import configparser +import argparse +from fastapi.logger import logger +from pathlib import Path +from typing import Any, Dict, List +from ApiRoute.ApiRouteConfig import config +from ConnectManager import PostgresManager +from psycopg2 import pool +import sys +import traceback + + +def convert_data(data) -> str: + return f"'{str(data)}'" + + +def set_log_path(): + parser = configparser.ConfigParser() + parser.read(f"{config.root_path}/conf/logging.conf", encoding="utf-8") + + parser.set( + "handler_rotatingFileHandler", + "args", + 
f"('{config.root_path}/log/API-Router.log', 'a', 20000000, 10)", + ) + + with open(f"{config.root_path}/conf/logging.conf", "w") as f: + parser.write(f) + + +def get_config(config_name: str): + ano_cfg = {} + + conf = configparser.ConfigParser() + config_path = config.root_path + f"/conf/{config_name}" + conf.read(config_path, encoding="utf-8") + for section in conf.sections(): + ano_cfg[section] = {} + for option in conf.options(section): + ano_cfg[section][option] = conf.get(section, option) + + return ano_cfg + + +def parser_params() -> Any: + parser = argparse.ArgumentParser() + parser.add_argument("--host", type=str, default="127.0.0.1") + parser.add_argument("--port", type=int, default=18000) + parser.add_argument("--db_type", default="test") + + return parser.parse_args() + + +def prepare_config() -> None: + args = parser_params() + config.root_path = str( + Path(os.path.dirname(os.path.abspath(__file__))).parent + ) # Path(os.getcwd()).parent + api_router_cfg = get_config("config.ini") + config.api_config = get_config("api_config.ini") + config.db_type = f"{args.db_type}_db" + config.server_host = args.host + config.server_port = args.port + config.db_info = api_router_cfg[config.db_type] + config.conn_pool = make_connection_pool(config.db_info) + config.remote_info = api_router_cfg["remote"] + config.secret_info = api_router_cfg["secret_info"] + + +def make_connection_pool(db_info): + conn_pool = pool.SimpleConnectionPool( + 1, + 20, + user=db_info["user"], + password=db_info["password"], + host=db_info["host"], + port=db_info["port"], + database=db_info["database"], + options=f'-c search_path={db_info["schema"]}', + connect_timeout=10, + ) + return conn_pool + + +def connect_db(): + db = PostgresManager() + return db + + +def save_file_for_reload(): + with open(__file__, "a") as fd: + fd.write(" ") + + +def make_res_msg(result, err_msg, data=None, column_names=None): + header_list = [] + for column_name in (column_names or []): + header = {"column_name": 
column_name} + header_list.append(header) + + if data is None or column_names is None: + res_msg = {"result": result, "errorMessage": err_msg} + else: + res_msg = { + "result": result, + "errorMessage": err_msg, + "body": data, + "header": header_list, + } + return res_msg + + +def get_exception_info(): + ex_type, ex_value, ex_traceback = sys.exc_info() + trace_back = traceback.extract_tb(ex_traceback) + trace_log = "\n".join([str(trace) for trace in trace_back]) + logger.error( + f"\n- Exception Type : {ex_type}\n- Exception Message : {str(ex_value).strip()}\n- Exception Log : \n{trace_log}" + ) + return ex_type.__name__ + + +def delete_headers(headers: Dict, delete_header: List) -> Dict: + for delete in delete_header: + if headers.get(delete): + del headers[delete] + return headers diff --git a/API-ROUTER/Utils/RouteUtil.py b/API-ROUTER/Utils/RouteUtil.py new file mode 100644 index 00000000..8abf2e0e --- /dev/null +++ b/API-ROUTER/Utils/RouteUtil.py @@ -0,0 +1,115 @@ +import asyncssh +import aiohttp +from fastapi.logger import logger +from fastapi.responses import JSONResponse +from urllib.parse import ParseResult +from ApiRoute.ApiRouteConfig import config +from Utils.CommonUtil import get_exception_info +from typing import Dict + + +def make_url(server_name: str, url_path: str): + for server_info in config.api_server_info: + if server_info["nm"] == server_name: + if len(server_info["ip_adr"]) != 0: + netloc = server_info["ip_adr"] + else: + netloc = server_info["domn_nm"] + url = ParseResult( + scheme="http", + netloc=netloc, + path=url_path, + params="", + query="", + fragment="", + ) + logger.info(f"Message Passing Url : {url.geturl()}") + return url.geturl() + return None + + +def make_route_response(result, api_name, access_token): + response = JSONResponse(content=result) + add_cookie_api_list = config.secret_info["add_cookie_api"].split(",") + if api_name in add_cookie_api_list: + response.set_cookie(key=config.secret_info["cookie_name"], 
value=access_token) + return response + + +def get_api_info(route_url): + api_info = None + api_params = [] + for api in config.api_info: + if api["route_url"] == route_url: + api_info = api + for params in config.api_params: + if params["api_nm"] == api["api_nm"]: + api_params.append(params) + break + return api_info, api_params + + +async def bypass_msg(api_info, params_query, body, headers): + method = api_info["meth"] + + url = make_url(api_info["ctgry"], api_info["url"]) + if url is None: + return {"result": 0, "errorMessage": "The server info does not exist."}, "" + + async with aiohttp.ClientSession() as session: + if method == "GET": + params = {} + if len(params_query) != 0: + for param in params_query.split("&"): + parser_param = param.split("=") + params[parser_param[0]] = parser_param[1] + + async with session.get(url, params=params, headers=headers) as response: + access_token = response.cookies.get(config.secret_info["cookie_name"]) + result = await response.json() + elif method == "POST": + async with session.post(url, json=body, headers=headers) as response: + access_token = response.cookies.get(config.secret_info["cookie_name"]) + result = await response.json() + else: + logger.error(f"Method Not Allowed. 
{method}") + result = {"result": 0, "errorMessage": "Method Not Allowed."} + return result, access_token + + +async def run_cmd(cmd: str): + async with asyncssh.connect( + host=config.remote_info["host"], + port=int(config.remote_info["port"]), + username=config.remote_info["id"], + password=config.remote_info["password"], + known_hosts=None, + ) as conn: + logger.info(f"Run Cmd : {cmd}") + result = await conn.run(cmd, check=True) + logger.info(f"Command Result : {result.stdout}") + return result.stdout + + +async def call_remote_func(api_info, api_params, input_params) -> Dict: + command_input = "" + for api_param in api_params: + try: + data = input_params[api_param["nm"]] + if not data: + data = api_param["deflt_val"] + command_input += f' --{api_param["nm"]} "{data}"' + except KeyError: + logger.error(f'parameter set default value. [{api_param["nm"]}]') + command_input += f' --{api_param["nm"]} {api_param["deflt_val"]}' + + cmd = f'{api_info["cmd"]} {command_input}' + + try: + result = await run_cmd(cmd) + except Exception: + except_name = get_exception_info() + res_msg = {"result": 0, "errorMessage": except_name} + else: + res_msg = {"result": 1, "errorMessage": "", "data": eval(result)} + return res_msg diff --git a/API-ROUTER/Utils/__init__.py b/API-ROUTER/Utils/__init__.py new file mode 100644 index 00000000..22c1052e --- /dev/null +++ b/API-ROUTER/Utils/__init__.py @@ -0,0 +1,2 @@ +from .CommonUtil import * +from .RouteUtil import * diff --git a/API-ROUTER/conf/api_config.ini b/API-ROUTER/conf/api_config.ini new file mode 100644 index 00000000..dd1a1278 --- /dev/null +++ b/API-ROUTER/conf/api_config.ini @@ -0,0 +1,9 @@ +[TEST_1] +method = GET +url_prefix = /api +sub_dir = service + +[TEST_2] +method = GET +url_prefix = /api +sub_dir = service \ No newline at end of file diff --git a/API-ROUTER/conf/config.ini b/API-ROUTER/conf/config.ini new file mode 100644 index 00000000..314350b0 --- /dev/null +++ b/API-ROUTER/conf/config.ini @@ -0,0 +1,25 @@ +[remote] 
+host = 10.10.20.59 +port = 20022 +id = root +password = root123 + +[test_db] +host = 192.168.100.126 +port = 25432 +user = dpsi +password = hello.sitemng12#$ +database = dataportal +schema = sitemng + +[commercial_db] +host = 192.168.54.60 +port = 5432 +user = dpsi +password = hello.sitemng12#$ +database = dataportal +schema = sitemng + +[secret_info] +cookie_name = user-katech-access-token +add_cookie_api = commonLogin,commonLogout,commonToken \ No newline at end of file diff --git a/API-ROUTER/conf/logging.conf b/API-ROUTER/conf/logging.conf new file mode 100644 index 00000000..6aed4f64 --- /dev/null +++ b/API-ROUTER/conf/logging.conf @@ -0,0 +1,28 @@ +[loggers] +keys = root + +[logger_root] +level = INFO +handlers = console,rotatingFileHandler + +[formatters] +keys = default + +[formatter_default] +format = %(asctime)s %(levelname)s [%(filename)s:%(lineno)d] - %(message)s + +[handlers] +keys = console,rotatingFileHandler + +[handler_console] +class = StreamHandler +args = (sys.stdout,) +formatter = default +level = INFO + +[handler_rotatingFileHandler] +class = handlers.RotatingFileHandler +formatter = default +args = ('/Users/cbc/DEV/Mobigen/API_DataPortal/Katech/AP_API_Router/API-ROUTER/log/API-Router.log', 'a', 20000000, 10) +level = INFO + diff --git a/API-ROUTER/requirements.txt b/API-ROUTER/requirements.txt new file mode 100644 index 00000000..3231aff7 --- /dev/null +++ b/API-ROUTER/requirements.txt @@ -0,0 +1,35 @@ +aiohttp==3.8.3 +aiosignal==1.2.0 +anyio==3.6.2 +asgiref==3.5.2 +async-timeout==4.0.2 +asyncssh==2.12.0 +attrs==22.1.0 +bcrypt==4.0.1 +certifi==2022.9.24 +cffi==1.15.1 +charset-normalizer==2.0.12 +click==8.1.3 +cryptography==38.0.1 +decorator==5.1.1 +fastapi==0.75.2 +frozenlist==1.3.1 +h11==0.14.0 +idna==3.4 +multidict==6.0.2 +paramiko==2.10.3 +psycopg2==2.8.6 +py==1.11.0 +pycparser==2.21 +pydantic==1.10.2 +PyJWT==2.6.0 +PyNaCl==1.5.0 +requests==2.27.1 +retry==0.9.2 +six==1.16.0 +sniffio==1.3.0 +starlette==0.17.1 +typing_extensions==4.4.0 
+urllib3==1.26.12 +uvicorn==0.16.0 +yarl==1.8.1 diff --git a/API-ROUTER/safe_start.sh b/API-ROUTER/safe_start.sh new file mode 100644 index 00000000..476fbeb8 --- /dev/null +++ b/API-ROUTER/safe_start.sh @@ -0,0 +1,62 @@ +app_name=API-Router +router_host=$1 +router_port=$2 +router_db=$3 + +input() { + if [[ $router_host == "" ]];then + router_host=192.168.100.126 + fi + if [[ $router_port == "" ]];then + router_port=9010 + fi + if [[ $router_db == "" ]];then + router_db=test + fi +} + +router_stop() { + app=$( ps -ef | grep python | grep server.py | grep ${router_host} | grep ${router_port} | awk '{print $2}' ) + if [[ $app != "" ]];then + exit_app="kill -9 ${app}" + echo "Stop Command ( router ) : "${exit_app} + $exit_app + else + echo "Not Found application. ( router )" + fi +} + +uvicorn_stop() { + uvicorn=$( netstat -nlp | grep ${router_host}':'${router_port} | awk '{print $7}' | tr "/" "\n" ) + if [[ $uvicorn != "" ]];then + for i in $uvicorn + do + if [[ ${i} == *python* ]];then + continue + fi + exit_uvicorn="kill -9 ${i}" + echo "Stop Command ( uvicorn ) : "${exit_uvicorn} + $exit_uvicorn + done + else + echo "Not Found application. 
( uvicorn )" + fi +} + +router_start() { + source_path="$( cd "$( dirname "$0" )" && pwd -P )" + router_exec="nohup python3 ${source_path}/server.py --host ${router_host} --port ${router_port} --db_type ${router_db} 1> /dev/null 2>&1 &" + echo "Start Command : ${router_exec}" + nohup python3 ${source_path}/server.py --host ${router_host} --port ${router_port} --db_type ${router_db} 1> /dev/null 2>&1 & +} + +echo "########## Safe Start (${app_name}) ##########" +echo "========== STOP ${app_name} ==========" +input + +router_stop +sleep 2 +uvicorn_stop + +echo "========== START ${app_name} ==========" +router_start diff --git a/API-ROUTER/server.py b/API-ROUTER/server.py new file mode 100644 index 00000000..acc59b09 --- /dev/null +++ b/API-ROUTER/server.py @@ -0,0 +1,28 @@ +from fastapi import FastAPI +import uvicorn +from ApiRoute.ApiRouteConfig import config +from Utils.CommonUtil import prepare_config, set_log_path +from ApiRoute import ApiRoute +import os + +prepare_config() +api_router = ApiRoute() +app = FastAPI() +app.include_router(api_router.router) + +if __name__ == "__main__": + log_dir = f"{config.root_path}/log" + if os.path.isdir(log_dir): + print("Directory Exists") + else: + print(f"Make log dir : {log_dir}") + os.makedirs(log_dir) + + set_log_path() + uvicorn.run( + "server:app", + host=config.server_host, + port=config.server_port, + reload=True, + log_config=f"{config.root_path}/conf/logging.conf", + ) diff --git a/API-ROUTER/start.sh b/API-ROUTER/start.sh new file mode 100644 index 00000000..7363e659 --- /dev/null +++ b/API-ROUTER/start.sh @@ -0,0 +1,29 @@ +app_name=API-Router +router_host=$1 +router_port=$2 +router_db=$3 + +input() { + if [[ $router_host == "" ]];then + router_host=192.168.100.126 + fi + if [[ $router_port == "" ]];then + router_port=8010 + fi + if [[ $router_db == "" ]];then + router_db=test + fi +} + +router_start() { + source_path="$( cd "$( dirname "$0" )" && pwd -P )" + router_exec="nohup python3 ${source_path}/server.py 
--host ${router_host} --port ${router_port} --db_type ${router_db} 1> /dev/null 2>&1 &" + echo "Start Command : ${router_exec}" + nohup python3 ${source_path}/server.py --host ${router_host} --port ${router_port} --db_type ${router_db} 1> /dev/null 2>&1 & +} + +echo "########## Start Application (${app_name}) ##########" +echo "========== START ${app_name} ==========" +input + +router_start diff --git a/API-ROUTER/stop.sh b/API-ROUTER/stop.sh new file mode 100644 index 00000000..3f9cb035 --- /dev/null +++ b/API-ROUTER/stop.sh @@ -0,0 +1,47 @@ +app_name=API-Router +router_host=$1 +router_port=$2 + +input() { + if [[ $router_host == "" ]];then + router_host=192.168.100.126 + fi + if [[ $router_port == "" ]];then + router_port=9010 + fi +} + +router_stop() { + app=$( ps -ef | grep python | grep server.py | grep ${router_host} | grep ${router_port} | awk '{print $2}' ) + if [[ $app != "" ]];then + exit_app="kill -9 ${app}" + echo "Stop Command ( router ) : "${exit_app} + $exit_app + else + echo "Not Found application. ( router )" + fi +} + +uvicorn_stop() { + uvicorn=$( netstat -nlp | grep ${router_host}':'${router_port} | awk '{print $7}' | tr "/" "\n" ) + if [[ $uvicorn != "" ]];then + for i in $uvicorn + do + if [[ ${i} == *python* ]];then + continue + fi + exit_uvicorn="kill -9 ${i}" + echo "Stop Command ( uvicorn ) : "${exit_uvicorn} + $exit_uvicorn + done + else + echo "Not Found application. 
( uvicorn )" + fi +} + +echo "########## Stop Application (${app_name}) ##########" +echo "========== STOP ${app_name} ==========" +input +router_stop +sleep 2 +uvicorn_stop diff --git a/API-SERVICE/ApiList/__init__.py b/API-SERVICE/ApiList/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/API-SERVICE/ApiList/common/__init__.py b/API-SERVICE/ApiList/common/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/API-SERVICE/ApiList/common/commonExecute.py b/API-SERVICE/ApiList/common/commonExecute.py new file mode 100644 index 00000000..7f0fc4c1 --- /dev/null +++ b/API-SERVICE/ApiList/common/commonExecute.py @@ -0,0 +1,67 @@ +from typing import Dict, List, Optional +from pydantic import BaseModel +from Utils.CommonUtil import connect_db, get_exception_info, convert_data + + +class commonExecute(BaseModel): + method: str + table_nm: str + data: Dict + key: Optional[List[str]] = None + + +def make_insert_query(excute: commonExecute): + columns = ", ".join(excute.data.keys()) + values = ", ".join(map(convert_data, excute.data.values())) + return f"INSERT INTO {excute.table_nm} ({columns}) VALUES ({values});" + + +def make_update_query(excute: commonExecute): + where = [] + update_data = [ + f"{key} = {convert_data(value)}" for key, value in excute.data.items() + ] + for key in excute.key: + where.append(f"{key} = {convert_data(excute.data.get(key))}") + return f'UPDATE {excute.table_nm} SET {",".join(update_data)}\ + WHERE {" AND ".join(where)};' + + +def make_delete_query(excute: commonExecute): + where = [] + for key in excute.key: + where.append(f"{key} = {convert_data(excute.data.get(key))}") + return f'DELETE FROM {excute.table_nm} WHERE {" AND ".join(where)};' + + +def make_execute_query(excute: commonExecute): + method = excute.method + query = None + if method == "INSERT": + query = make_insert_query(excute) + elif method == "UPDATE": + query = make_update_query(excute) + elif method == "DELETE": + query = 
make_delete_query(excute) + else: + raise ValueError(f"Invalid Method. ({method}))") + return query + + +def api(excute_list: List[commonExecute]) -> Dict: + query_list = [] + try: + for excute in excute_list: + query_list.append(make_execute_query(excute)) + + db = connect_db() + time_zone = "Asia/Seoul" + db.execute(f"SET TIMEZONE={convert_data(time_zone)}") + db.multiple_excute(query_list) + except Exception: + except_name = get_exception_info() + result = {"result": 0, "errorMessage": except_name} + else: + result = {"result": 1, "errorMessage": ""} + + return result diff --git a/API-SERVICE/ApiList/common/commonLogin.py b/API-SERVICE/ApiList/common/commonLogin.py new file mode 100644 index 00000000..ea61a0b5 --- /dev/null +++ b/API-SERVICE/ApiList/common/commonLogin.py @@ -0,0 +1,43 @@ +from datetime import timedelta +from typing import Dict + +from fastapi.responses import JSONResponse +from pydantic import BaseModel + +from ApiService.ApiServiceConfig import config +from Utils.CommonUtil import ( + get_exception_info, + create_token, + make_token_data, + authenticate_user, +) + + +class commonLogin(BaseModel): + data: Dict + + +def api(login: commonLogin): + """ + id_column = user_id + password_column = user_password + """ + access_token = "" + try: + user = authenticate_user( + login.data[config.user_info["id_column"]], + login.data[config.user_info["password_column"]], + ) + token_data = make_token_data(user) + access_token = create_token( + data=token_data, + expires_delta=timedelta(minutes=int(config.secret_info["expire_min"])), + ) + result = {"result": 1, "errorMessage": ""} + except Exception: + except_name = get_exception_info() + result = {"result": 0, "errorMessage": except_name} + + response = JSONResponse(content=result) + response.set_cookie(key=config.secret_info["cookie_name"], value=access_token) + return response diff --git a/API-SERVICE/ApiList/common/commonLogout.py b/API-SERVICE/ApiList/common/commonLogout.py new file mode 100644 index 
00000000..b5944363 --- /dev/null +++ b/API-SERVICE/ApiList/common/commonLogout.py @@ -0,0 +1,36 @@ +from typing import Dict +from fastapi.logger import logger +from fastapi.responses import JSONResponse +from jose import jwt +from starlette.requests import Request + +from Utils.CommonUtil import get_exception_info, get_user +from ApiService.ApiServiceConfig import config +from Utils.exceptions import TokenDoesNotExist, InvalidUserInfo + + +def api(request: Request) -> Dict: + f_delete = True + try: + recv_access_token = request.cookies.get(config.secret_info["cookie_name"]) + if not recv_access_token: + raise TokenDoesNotExist + payload = jwt.decode( + token=recv_access_token, + key=config.secret_info["secret_key"], + algorithms=config.secret_info["algorithm"], + ) + username = payload[config.user_info["id_column"]] + user = get_user(username) + if not user[0]: + raise InvalidUserInfo + except Exception: + except_name = get_exception_info() + result = {"result": 0, "errorMessage": except_name} + # f_delete = False + else: + result = {"result": 1, "errorMessage": ""} + response = JSONResponse(content=result) + if f_delete: + response.delete_cookie(key=config.secret_info["cookie_name"]) + return response diff --git a/API-SERVICE/ApiList/common/commonPassword.py b/API-SERVICE/ApiList/common/commonPassword.py new file mode 100644 index 00000000..b8114026 --- /dev/null +++ b/API-SERVICE/ApiList/common/commonPassword.py @@ -0,0 +1,99 @@ +from typing import Dict, Optional +from pydantic import BaseModel +from fastapi.logger import logger +from fastapi.requests import Request +from jose import jwt +from Utils.CommonUtil import ( + connect_db, + get_exception_info, + convert_data, + authenticate_user, +) +from ApiService.ApiServiceConfig import config +from Utils.exceptions import InvalidUserInfo, TokenDoesNotExist + + +class commonPassword(BaseModel): + """ + data: Dict = { + "user_id": email, + "password": current password + } + + """ + + data: Dict + new_password: 
Optional[str] = "" + + +def is_auth_role(user_role) -> bool: + auth_role = config.user_info["user_role"].split(",") + for role in user_role.split("|"): + if role in auth_role: + return True + return False + + +def reset_to_new_password(id: str, new_password: str, user_info_table: str, user_role: str) -> bool: + if not is_auth_role(user_role): + return False + db = connect_db() + time_zone = "Asia/Seoul" + db.execute(f"SET TIMEZONE={convert_data(time_zone)}") + db.execute( + f""" + UPDATE + {user_info_table} + SET + {config.user_info["password_column"]} = {convert_data(config.pwd_context.hash(new_password))}, + {config.user_info["normal_password"]} = {convert_data(new_password)} + WHERE + {config.user_info["id_column"]} = {convert_data(id)}; + """ + ) + + return True + + +def get_payload(cookies): + recv_access_token = cookies.get(config.secret_info["cookie_name"]) + if not recv_access_token: + raise TokenDoesNotExist + return jwt.decode( + token=recv_access_token, + key=config.secret_info["secret_key"], + algorithms=config.secret_info["algorithm"], + ) + + +def api(password: commonPassword, request: Request) -> Dict: + user_id = password.data.get(config.user_info["id_column"]) + cur_password = password.data.get(config.user_info["password_column"]) + new_password = password.new_password + user_info_table = config.user_info["table"] + + try: + payload = get_payload(request.cookies) + user_role = payload["user_role"] + if reset_to_new_password(user_id, new_password, user_info_table, user_role): + return {"result": 1, "errorMessage": ""} + if not cur_password: + raise InvalidUserInfo("user_password") + + db = connect_db() + authenticate_user(user_id, cur_password) + if new_password: + logger.info("Change Password") + time_zone = "Asia/Seoul" + db.execute(f"SET TIMEZONE={convert_data(time_zone)}") + db.execute( + f'UPDATE {user_info_table} SET {config.user_info["password_column"]} = {convert_data(config.pwd_context.hash(new_password))},' + f' 
{config.user_info["normal_password"]} = {convert_data(new_password)} WHERE {config.user_info["id_column"]} = {convert_data(user_id)};' + ) + except Exception: + except_name = get_exception_info() + result = {"result": 0, "errorMessage": except_name} + else: + result = {"result": 1, "errorMessage": ""} + + return result diff --git a/API-SERVICE/ApiList/common/commonRegister.py b/API-SERVICE/ApiList/common/commonRegister.py new file mode 100644 index 00000000..9b209529 --- /dev/null +++ b/API-SERVICE/ApiList/common/commonRegister.py @@ -0,0 +1,39 @@ +from typing import Dict +from pydantic import BaseModel +from fastapi.logger import logger +from Utils.CommonUtil import connect_db, get_exception_info, convert_data +from ApiService.ApiServiceConfig import config + + +class commonRegister(BaseModel): + data: Dict + + +def make_register_query(register: commonRegister): + password_column = config.user_info["password_column"] + user_info_table = config.user_info["table"] + + # at 221109 by seokwoo-yang, password 평문 필요 요청 + register.data["user_normal"] = register.data[password_column] + register.data[password_column] = config.pwd_context.hash(register.data[password_column]) + columns = ", ".join(register.data.keys()) + values = ", ".join(map(convert_data, register.data.values())) + query = f"INSERT INTO {user_info_table} ({columns}) VALUES ({values});" + return query + + +def api(register: commonRegister) -> Dict: + try: + query = make_register_query(register) + + db = connect_db() + time_zone = "Asia/Seoul" + db.execute(f"SET TIMEZONE={convert_data(time_zone)}") + db.execute(query) + except Exception: + except_name = get_exception_info() + result = {"result": 0, "errorMessage": except_name} + else: + result = {"result": 1, "errorMessage": ""} + + return result diff --git a/API-SERVICE/ApiList/common/commonSelect.py b/API-SERVICE/ApiList/common/commonSelect.py new file mode 100644 index 00000000..f77e89f6 --- /dev/null +++ b/API-SERVICE/ApiList/common/commonSelect.py @@ 
-0,0 +1,138 @@ +from typing import Dict, List, Optional +from ApiService.ApiServiceConfig import config +from Utils.CommonUtil import connect_db, make_res_msg, get_exception_info, convert_data +from pydantic import BaseModel +from fastapi.logger import logger + + +class joinInfo(BaseModel): + table_nm: str + key: str + + +class subWhereInfo(BaseModel): + table_nm: str + key: str + value: str + compare_op: str + op: Optional[str] = "" + + +class whereInfo(BaseModel): + table_nm: str + key: str + value: str + compare_op: str + op: Optional[str] = "" + sub: Optional[List[subWhereInfo]] = None + + +class orderInfo(BaseModel): + table_nm: str + key: str + order: str + + +class pageInfo(BaseModel): + per_page: int + cur_page: int + + +class commonSelect(BaseModel): + table_nm: str + key: Optional[str] = None + join_info: Optional[joinInfo] = None + where_info: Optional[List[whereInfo]] = None + order_info: Optional[orderInfo] = None + page_info: Optional[pageInfo] = None + + +def convert_compare_op(compare_str): + if compare_str == "Equal": + compare_op = "=" + elif compare_str == "Not Equal": + compare_op = "!=" + elif compare_str == "Greater Than": + compare_op = ">" + elif compare_str == "Greater Than or Equal": + compare_op = ">=" + elif compare_str == "Less Than": + compare_op = "<" + elif compare_str == "Less Than or Equal": + compare_op = "<=" + else: + compare_op = compare_str + return compare_op + + +def make_where_value(where): + if where.compare_op == "IN" or where.compare_op == "NOT IN": + value_list = ", ".join(map(convert_data, where.value.split(","))) + value = f"( {value_list} )" + else: + value = convert_data(where.value) + return value + + +def make_where_info(where_info: List[whereInfo]): + where = "" + for info in where_info: + value = make_where_value(info) + if info.sub: + sub_where = f"{info.table_nm}.{info.key} {convert_compare_op(info.compare_op)} {value}" + for sub_info in info.sub: + sub_value = make_where_value(sub_info) + sub_where = 
f"{sub_where} {sub_info.op} {sub_info.table_nm}.{sub_info.key} {convert_compare_op(sub_info.compare_op)} {sub_value}" + where = f"{where} {info.op} ({sub_where})" + else: + where = f"{where} {info.op} {info.table_nm}.{info.key} {convert_compare_op(info.compare_op)} {value}" + return f"WHERE {where}" + + +def make_select_query(select_info: commonSelect): + join, where, order, page = "", "", "", "" + join_info, where_info, order_info, page_info = ( + select_info.join_info, + select_info.where_info, + select_info.order_info, + select_info.page_info, + ) + if join_info: + join = f"JOIN {join_info.table_nm} ON {select_info.table_nm}.{select_info.key} = {join_info.table_nm}.{join_info.key}" + if where_info: + where = make_where_info(where_info) + if order_info: + order = f"ORDER BY {order_info.table_nm}.{order_info.key} {order_info.order}" + if page_info: + page = f"LIMIT {page_info.per_page} OFFSET ({page_info.per_page} * {page_info.cur_page - 1})" + + select_query = ( + f"SELECT * FROM {select_info.table_nm} {join} {where} {order} {page};" + ) + count_query = f"SELECT count(*) FROM {select_info.table_nm} {join} {where};" + + return select_query, count_query + + +def api(select_info: commonSelect) -> Dict: + get_column_info = f"SELECT eng_nm, kor_nm FROM tb_table_column_info \ + WHERE table_id = (SELECT table_id FROM tb_table_list WHERE table_nm = {convert_data(select_info.table_nm)});" + get_query, total_cnt_query = make_select_query(select_info) + logger.info(f"Get Query : {get_query}") + + try: + db = connect_db() + select_data, _ = db.select(get_query) + if select_info.page_info: + total_cnt = db.select(total_cnt_query) + except Exception: + except_name = get_exception_info() + result = {"result": 0, "errorMessage": except_name} + else: + column_info, _ = db.select(get_column_info) + kor_nm_list = [map_data["kor_nm"] for map_data in column_info] + eng_nm_list = [map_data["eng_nm"] for map_data in column_info] + result = make_res_msg(1, "", select_data, eng_nm_list, 
kor_nm_list) + if select_info.page_info: + result["data"].update(total_cnt[0][0]) + return result diff --git a/API-SERVICE/ApiList/common/commonToken.py b/API-SERVICE/ApiList/common/commonToken.py new file mode 100644 index 00000000..a3253f8e --- /dev/null +++ b/API-SERVICE/ApiList/common/commonToken.py @@ -0,0 +1,41 @@ +from typing import Dict +from fastapi.logger import logger +from fastapi.responses import JSONResponse +from datetime import timedelta +from jose import jwt +from Utils.CommonUtil import get_exception_info, get_user, create_token, make_token_data +from ApiService.ApiServiceConfig import config +from starlette.requests import Request +from Utils.exceptions import TokenDoesNotExist, InvalidUserInfo + + +def api(request: Request) -> Dict: + access_token = "" + try: + recv_access_token = request.cookies.get(config.secret_info["cookie_name"]) + if not recv_access_token: + raise TokenDoesNotExist + payload = jwt.decode( + token=recv_access_token, + key=config.secret_info["secret_key"], + algorithms=config.secret_info["algorithm"], + ) + username = payload[config.user_info["id_column"]] + user = get_user(username) + if not user[0]: + raise InvalidUserInfo + user = user[0][0] + except Exception: + except_name = get_exception_info() + result = {"result": 0, "errorMessage": except_name} + else: + token_data = make_token_data(user) + access_token = create_token( + data=token_data, + expires_delta=timedelta(minutes=int(config.secret_info["expire_min"])), + ) + result = {"result": 1, "errorMessage": ""} + + response = JSONResponse(content=result) + response.set_cookie(key=config.secret_info["cookie_name"], value=access_token) + return response diff --git a/API-SERVICE/ApiList/common/commonUserInfo.py b/API-SERVICE/ApiList/common/commonUserInfo.py new file mode 100644 index 00000000..e67cabc9 --- /dev/null +++ b/API-SERVICE/ApiList/common/commonUserInfo.py @@ -0,0 +1,32 @@ +from typing import Dict +from fastapi.logger import logger +from jose import jwt +from 
starlette.requests import Request + +from Utils.CommonUtil import get_exception_info, get_user, make_res_msg +from ApiService.ApiServiceConfig import config +from Utils.exceptions import TokenDoesNotExist, InvalidUserInfo + + +def api(request: Request) -> Dict: + try: + recv_access_token = request.cookies.get(config.secret_info["cookie_name"]) + if not recv_access_token: + raise TokenDoesNotExist + payload = jwt.decode( + token=recv_access_token, + key=config.secret_info["secret_key"], + algorithms=config.secret_info["algorithm"], + ) + username = payload[config.user_info["id_column"]] + user = get_user(username) + if not user[0]: + raise InvalidUserInfo + user = user[0][0] + except Exception: + except_name = get_exception_info() + result = {"result": 0, "errorMessage": except_name} + else: + result = {"result": 1, "errorMessage": "", "data": {"body": payload}} + + return result diff --git a/API-SERVICE/ApiList/common/emailAthnCnfm.py b/API-SERVICE/ApiList/common/emailAthnCnfm.py new file mode 100644 index 00000000..032b0f9d --- /dev/null +++ b/API-SERVICE/ApiList/common/emailAthnCnfm.py @@ -0,0 +1,39 @@ +from typing import Dict +from fastapi.logger import logger +from pydantic import BaseModel +from Utils.CommonUtil import get_exception_info, connect_db, convert_data + + +class EmailAuthFail(Exception): + pass + + +class EmailAthnCnfm(BaseModel): + email: str + athn_no: str + + +def api(email_confirm: EmailAthnCnfm) -> Dict: + try: + db = connect_db() + email_info, _ = db.select( + f"SELECT * FROM tb_email_athn_info WHERE email={convert_data(email_confirm.email)}" + ) + + if email_info[0]["athn_no"] == email_confirm.athn_no: + time_zone = "Asia/Seoul" + db.execute(f"SET TIMEZONE={convert_data(time_zone)}") + db.execute( + f"UPDATE tb_email_athn_info \ + SET athn_yn='Y', athn_date=NOW() WHERE email={convert_data(email_confirm.email)};" + ) + else: + raise EmailAuthFail + logger.info("Successfully Auth Confirm.") + except Exception: + except_name = 
get_exception_info() + result = {"result": 0, "errorMessage": except_name} + else: + result = {"result": 1, "errorMessage": ""} + + return result diff --git a/API-SERVICE/ApiList/common/emailAthnPass.py b/API-SERVICE/ApiList/common/emailAthnPass.py new file mode 100644 index 00000000..d3b5ad60 --- /dev/null +++ b/API-SERVICE/ApiList/common/emailAthnPass.py @@ -0,0 +1,47 @@ +from typing import Dict +from fastapi.logger import logger +from pydantic import BaseModel +from Utils.CommonUtil import get_exception_info, connect_db, convert_data +from ApiService.ApiServiceConfig import config + + +class EmailAuthFail(Exception): + pass + + +class EmailAthnPass(BaseModel): + email: str + athn_no: str + new_password: str + + +def api(email_athn_pass: EmailAthnPass) -> Dict: + user_id = email_athn_pass.email + new_password = email_athn_pass.new_password + user_info_table = config.user_info["table"] + try: + db = connect_db() + email_info, _ = db.select( + f"SELECT * FROM tb_email_athn_info WHERE email={convert_data(email_athn_pass.email)}" + ) + + if email_info[0]["athn_no"] == email_athn_pass.athn_no: + time_zone = "Asia/Seoul" + db.execute(f"SET TIMEZONE={convert_data(time_zone)}") + if email_info[0]["athn_yn"] == "Y": + db.execute( + f'UPDATE {user_info_table} SET {config.user_info["password_column"]} = {convert_data(config.pwd_context.hash(new_password))} \ + WHERE {config.user_info["id_column"]} = {convert_data(user_id)};' + ) + else: + raise EmailAuthFail + else: + raise EmailAuthFail + logger.info("Successfully Auth Password.") + except Exception: + except_name = get_exception_info() + result = {"result": 0, "errorMessage": except_name} + else: + result = {"result": 1, "errorMessage": ""} + + return result diff --git a/API-SERVICE/ApiList/common/emailAthnSend.py b/API-SERVICE/ApiList/common/emailAthnSend.py new file mode 100644 index 00000000..86f71da2 --- /dev/null +++ b/API-SERVICE/ApiList/common/emailAthnSend.py @@ -0,0 +1,81 @@ +import random +import string +from 
typing import Dict + +from fastapi.logger import logger +from pydantic import BaseModel + +from ApiService.ApiServiceConfig import config +from Utils import insert_mail_history +from Utils.CommonUtil import ( + get_exception_info, + connect_db, + convert_data, + send_template_mail, +) + + +class EmailNotAuth(Exception): + pass + + +class EmailNotExist(Exception): + pass + + +class EmailAthnSend(BaseModel): + email: str + msg_type: str # register or password + + +def make_auth_no(): + string_pool = string.ascii_letters + string.digits + auth_no = "" + for _ in range(int(config.email_auth["auth_no_len"])): + auth_no += random.choice(string_pool) + return auth_no + + +def make_email_auth_query(email, auth_no, exist_mail): + if exist_mail: + query = f"UPDATE tb_email_athn_info \ + SET athn_no={convert_data(auth_no)}, send_date=NOW() WHERE email={convert_data(email)};" + else: + query = f"INSERT INTO tb_email_athn_info (email, athn_no, athn_yn, send_date) \ + VALUES ({convert_data(email)}, {convert_data(auth_no)}, 'N', NOW());" + return query + + +def api(email_auth: EmailAthnSend) -> Dict: + try: + auth_no = make_auth_no() + db = connect_db() + exist_mail, _ = db.select(f"SELECT * FROM tb_email_athn_info WHERE email={convert_data(email_auth.email)}") + + if email_auth.msg_type == "password": + if len(exist_mail) == 0: + raise EmailNotExist + if exist_mail[0]["athn_yn"] == "N": + raise EmailNotAuth + + send_template_mail(auth_no, email_auth.email, email_auth.msg_type) + insert_mail_history( + rcv_adr=email_auth.email, + title=config.email_auth[f"subject_{email_auth.msg_type}"], + contents=auth_no, + tmplt_cd=email_auth.msg_type, + ) + + time_zone = "Asia/Seoul" + db.execute(f"SET TIMEZONE={convert_data(time_zone)}") + query = make_email_auth_query(email_auth.email, auth_no, exist_mail) + db.execute(query) + + logger.info("Successfully sent the mail.") + except Exception: + except_name = get_exception_info() + result = {"result": 0, "errorMessage": except_name} + else: + 
result = {"result": 1, "errorMessage": ""} + + return result diff --git a/API-SERVICE/ApiList/common/emailDataShare.py b/API-SERVICE/ApiList/common/emailDataShare.py new file mode 100644 index 00000000..a5e64081 --- /dev/null +++ b/API-SERVICE/ApiList/common/emailDataShare.py @@ -0,0 +1,27 @@ +from pydantic import BaseModel + +from ApiService.ApiServiceConfig import config +from Utils import insert_mail_history +from Utils.CommonUtil import send_template_mail, get_exception_info + + +class EmailInfo(BaseModel): + email: str + msg_type: str # share + message: str + + +def api(params: EmailInfo): + try: + send_template_mail(params.message, params.email, params.msg_type) + insert_mail_history( + rcv_adr=params.email, + title=config.email_auth[f"subject_{params.msg_type}"], + contents=params.message, + tmplt_cd=params.msg_type, + ) + + return {"result": 1, "errorMessage": ""} + except Exception: + except_name = get_exception_info() + return {"result": 0, "errorMessage": except_name} diff --git a/API-SERVICE/ApiList/meta/__init__.py b/API-SERVICE/ApiList/meta/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/API-SERVICE/ApiList/meta/addChildCategory.py b/API-SERVICE/ApiList/meta/addChildCategory.py new file mode 100644 index 00000000..4816ed40 --- /dev/null +++ b/API-SERVICE/ApiList/meta/addChildCategory.py @@ -0,0 +1,25 @@ +import uuid +from typing import Dict +from ApiService.ApiServiceConfig import config +from Utils.CommonUtil import connect_db, get_exception_info, convert_data +from pydantic import BaseModel + + +class addChildCategory(BaseModel): + prnts_id: str + node_nm: str + + +def api(insert: addChildCategory) -> Dict: + query = f"INSERT INTO tb_category (node_nm, prnts_id, node_id)\ + VALUES ({convert_data(insert.node_nm)},{convert_data(insert.prnts_id)},{convert_data(uuid.uuid4())});" + + try: + db = connect_db() + db.execute(query) + except Exception: + except_name = get_exception_info() + result = {"result": 0, "errorMessage": 
except_name} + else: + result = {"result": 1, "errorMessage": ""} + return result diff --git a/API-SERVICE/ApiList/meta/deleteElsBizMeta.py b/API-SERVICE/ApiList/meta/deleteElsBizMeta.py new file mode 100644 index 00000000..f3577a2a --- /dev/null +++ b/API-SERVICE/ApiList/meta/deleteElsBizMeta.py @@ -0,0 +1,24 @@ +from typing import Dict +from pydantic import BaseModel +from Utils.CommonUtil import get_exception_info +from ELKSearch.Manager.manager import ElasticSearchManager +from ApiService.ApiServiceConfig import config +from ELKSearch.Utils.database_utils import get_config + + +class DeleteData(BaseModel): + biz_dataset_id: str + + +def api(input: DeleteData) -> Dict: + els_config = get_config(config.root_path, "config.ini")[config.db_type[:-3]] + try: + es = ElasticSearchManager(**els_config) + es.delete("biz_dataset_id", input.biz_dataset_id) + + except Exception: + except_name = get_exception_info() + result = {"result": 0, "errorMessage": except_name} + else: + result = {"result": 1, "errorMessage": ""} + return result diff --git a/API-SERVICE/ApiList/meta/emailAthnCnfm.py b/API-SERVICE/ApiList/meta/emailAthnCnfm.py new file mode 100644 index 00000000..032b0f9d --- /dev/null +++ b/API-SERVICE/ApiList/meta/emailAthnCnfm.py @@ -0,0 +1,39 @@ +from typing import Dict +from fastapi.logger import logger +from pydantic import BaseModel +from Utils.CommonUtil import get_exception_info, connect_db, convert_data + + +class EmailAuthFail(Exception): + pass + + +class EmailAthnCnfm(BaseModel): + email: str + athn_no: str + + +def api(email_confirm: EmailAthnCnfm) -> Dict: + try: + db = connect_db() + email_info, _ = db.select( + f"SELECT * FROM tb_email_athn_info WHERE email={convert_data(email_confirm.email)}" + ) + + if email_info[0]["athn_no"] == email_confirm.athn_no: + time_zone = "Asia/Seoul" + db.execute(f"SET TIMEZONE={convert_data(time_zone)}") + db.execute( + f"UPDATE tb_email_athn_info \ + SET athn_yn='Y', athn_date=NOW() WHERE 
email={convert_data(email_confirm.email)};" + ) + else: + raise EmailAuthFail + logger.info("Successfully Auth Confirm.") + except Exception: + except_name = get_exception_info() + result = {"result": 0, "errorMessage": except_name} + else: + result = {"result": 1, "errorMessage": ""} + + return result diff --git a/API-SERVICE/ApiList/meta/emailAthnPass.py b/API-SERVICE/ApiList/meta/emailAthnPass.py new file mode 100644 index 00000000..d3b5ad60 --- /dev/null +++ b/API-SERVICE/ApiList/meta/emailAthnPass.py @@ -0,0 +1,47 @@ +from typing import Dict +from fastapi.logger import logger +from pydantic import BaseModel +from Utils.CommonUtil import get_exception_info, connect_db, convert_data +from ApiService.ApiServiceConfig import config + + +class EmailAuthFail(Exception): + pass + + +class EmailAthnPass(BaseModel): + email: str + athn_no: str + new_password: str + + +def api(email_athn_pass: EmailAthnPass) -> Dict: + user_id = email_athn_pass.email + new_password = email_athn_pass.new_password + user_info_table = config.user_info["table"] + try: + db = connect_db() + email_info, _ = db.select( + f"SELECT * FROM tb_email_athn_info WHERE email={convert_data(email_athn_pass.email)}" + ) + + if email_info[0]["athn_no"] == email_athn_pass.athn_no: + time_zone = "Asia/Seoul" + db.execute(f"SET TIMEZONE={convert_data(time_zone)}") + if email_info[0]["athn_yn"] == "Y": + db.execute( + f'UPDATE {user_info_table} SET {config.user_info["password_column"]} = {convert_data(config.pwd_context.hash(new_password))} \ + WHERE {config.user_info["id_column"]} = {convert_data(user_id)};' + ) + else: + raise EmailAuthFail + else: + raise EmailAuthFail + logger.info("Successfully Auth Password.") + except Exception: + except_name = get_exception_info() + result = {"result": 0, "errorMessage": except_name} + else: + result = {"result": 1, "errorMessage": ""} + + return result diff --git a/API-SERVICE/ApiList/meta/emailAthnSend.py b/API-SERVICE/ApiList/meta/emailAthnSend.py new file mode 
100644 index 00000000..86f71da2 --- /dev/null +++ b/API-SERVICE/ApiList/meta/emailAthnSend.py @@ -0,0 +1,81 @@ +import random +import string +from typing import Dict + +from fastapi.logger import logger +from pydantic import BaseModel + +from ApiService.ApiServiceConfig import config +from Utils import insert_mail_history +from Utils.CommonUtil import ( + get_exception_info, + connect_db, + convert_data, + send_template_mail, +) + + +class EmailNotAuth(Exception): + pass + + +class EmailNotExist(Exception): + pass + + +class EmailAthnSend(BaseModel): + email: str + msg_type: str # register or password + + +def make_auth_no(): + string_pool = string.ascii_letters + string.digits + auth_no = "" + for _ in range(int(config.email_auth["auth_no_len"])): + auth_no += random.choice(string_pool) + return auth_no + + +def make_email_auth_query(email, auth_no, exist_mail): + if exist_mail: + query = f"UPDATE tb_email_athn_info \ + SET athn_no={convert_data(auth_no)}, send_date=NOW() WHERE email={convert_data(email)};" + else: + query = f"INSERT INTO tb_email_athn_info (email, athn_no, athn_yn, send_date) \ + VALUES ({convert_data(email)}, {convert_data(auth_no)}, 'N', NOW());" + return query + + +def api(email_auth: EmailAthnSend) -> Dict: + try: + auth_no = make_auth_no() + db = connect_db() + exist_mail, _ = db.select(f"SELECT * FROM tb_email_athn_info WHERE email={convert_data(email_auth.email)}") + + if email_auth.msg_type == "password": + if len(exist_mail) == 0: + raise EmailNotExist + if exist_mail[0]["athn_yn"] == "N": + raise EmailNotAuth + + send_template_mail(auth_no, email_auth.email, email_auth.msg_type) + insert_mail_history( + rcv_adr=email_auth.email, + title=config.email_auth[f"subject_{email_auth.msg_type}"], + contents=auth_no, + tmplt_cd=email_auth.msg_type, + ) + + time_zone = "Asia/Seoul" + db.execute(f"SET TIMEZONE={convert_data(time_zone)}") + query = make_email_auth_query(email_auth.email, auth_no, exist_mail) + db.execute(query) + + 
logger.info("Successfully sent the mail.") + except Exception: + except_name = get_exception_info() + result = {"result": 0, "errorMessage": except_name} + else: + result = {"result": 1, "errorMessage": ""} + + return result diff --git a/API-SERVICE/ApiList/meta/getCategoryList.py b/API-SERVICE/ApiList/meta/getCategoryList.py new file mode 100644 index 00000000..76716c82 --- /dev/null +++ b/API-SERVICE/ApiList/meta/getCategoryList.py @@ -0,0 +1,17 @@ +from typing import Dict +from ApiService.ApiServiceConfig import config +from Utils.CommonUtil import connect_db, get_exception_info + + +def api() -> Dict: + category_query = "SELECT * FROM tb_category ORDER BY prnts_id, node_id;" + + try: + db = connect_db() + category_list = db.select(category_query)[0] + except Exception: + except_name = get_exception_info() + result = {"result": 0, "errorMessage": except_name} + else: + result = {"result": 1, "errorMessage": "", "data": category_list} + return result diff --git a/API-SERVICE/ApiList/meta/getCategoryNmCount.py b/API-SERVICE/ApiList/meta/getCategoryNmCount.py new file mode 100644 index 00000000..655b0371 --- /dev/null +++ b/API-SERVICE/ApiList/meta/getCategoryNmCount.py @@ -0,0 +1,29 @@ +from typing import Dict +from ELKSearch.Manager.manager import ElasticSearchManager +from ELKSearch.Utils.elasticsearch_utils import make_query +from Utils.CommonUtil import get_exception_info +from ELKSearch.Utils.database_utils import get_config +from ApiService.ApiServiceConfig import config + + +def api(nms) -> Dict: + data_dict = {} + key = "re_ctgry" + els_config = get_config(config.root_path, "config.ini")[config.db_type[:-3]] + try: + ctgry_nm_list = nms.split(",") + es = ElasticSearchManager(**els_config) + for c_id in ctgry_nm_list: + c_v = c_id.replace(" ","") + cnt_query = make_query("query", "match_phrase", {key: c_v}) + cnt = es.conn.count(index=es.index, body=cnt_query)["count"] + data_dict[c_id.replace(" ", "_")] = cnt + + except Exception: + except_name = 
get_exception_info() + result = {"result": 0, "errorMessage": except_name} + else: + pass + result = {"result": 1, "errorMessage": "", "data": data_dict} + + return result diff --git a/API-SERVICE/ApiList/meta/getCategoryTree.py b/API-SERVICE/ApiList/meta/getCategoryTree.py new file mode 100644 index 00000000..e60aa7cf --- /dev/null +++ b/API-SERVICE/ApiList/meta/getCategoryTree.py @@ -0,0 +1,41 @@ +from typing import Dict +from ApiService.ApiServiceConfig import config +from Utils.CommonUtil import connect_db, make_res_msg, get_exception_info + + +def api() -> Dict: + get_category_list = "SELECT * FROM tb_category;" + + try: + db = connect_db() + category_list, _ = db.select(get_category_list) + + node_dict = {} + category_tree = {} + for category in category_list: + node_dict[category["node_id"]] = category["node_nm"] + category_tree[category["node_nm"]] = [] + + for category in category_list: + if node_dict.get(category["prnts_id"]): + parent_name = node_dict[category["prnts_id"]] + category_tree[parent_name].append(category["node_nm"]) + + result_category = {} + for category in category_tree["ROOT"]: + if category == "ROOT": + continue + result_category[category] = None + + for main_category, sub_category in category_tree.items(): + if sub_category: + if main_category == "ROOT": + continue + result_category[main_category] = sub_category + except Exception: + except_name = get_exception_info() + result = {"result": 0, "errorMessage": except_name} + else: + result = make_res_msg(1, "", result_category, []) + + return result diff --git a/API-SERVICE/ApiList/meta/getElsBizMetaList.py b/API-SERVICE/ApiList/meta/getElsBizMetaList.py new file mode 100644 index 00000000..b00527eb --- /dev/null +++ b/API-SERVICE/ApiList/meta/getElsBizMetaList.py @@ -0,0 +1,80 @@ +from typing import Dict +from datetime import datetime +from ELKSearch.Manager.manager import ElasticSearchManager +from ELKSearch.Utils.model import InputModel +from ELKSearch.Utils.elasticsearch_utils import 
make_query, base_search_query +from ELKSearch.Utils.database_utils import get_config +from Utils.CommonUtil import get_exception_info +from Utils.SearchUtil import search_count, ckan_query +from ApiService.ApiServiceConfig import config + + +def extra_filter(option_list): + els_katech_option = ["ctgry", "data_shap", "data_prv_desk"] + for item in option_list: + for col in els_katech_option: + if col in item.field: + item.field.append(f"re_{col}") + index = item.field.index(col) + del item.field[index] + item.keywords = [v.replace(" ", "") for v in item.keywords] + + tmp = [] + for field in item.field: + tmp.append(field) + if field in ["data_nm", "data_desc"]: + col = field + ".korean_analyzer" + tmp.append(col) + item.field = tmp + + return option_list + + +def api(input: InputModel) -> Dict: + from_ = input.from_ - 1 + index = "biz_meta" + els_config = get_config(config.root_path, "config.ini")[config.db_type[:-3]] + try: + if input.chk and len(input.searchOption): + with open( + f"{config.root_path}/log/{config.category}/{datetime.today().strftime('%Y%m%d')}_search.log", + "a", + ) as fp: + for search in input.searchOption: + fp.write(f"{str(search.keywords)}\n") + + es = ElasticSearchManager(page=from_, size=input.size, index=index, **els_config) + es.set_sort(input.sortOption) + + ############ search option ############ + action = "query" + sub_action = "must" + input.searchOption = extra_filter(input.searchOption) + query_dict = base_search_query(action, sub_action, input.searchOption) + + # ############ filter option ############ + sub_action = "filter" + input.filterOption = extra_filter(input.filterOption) + item_dict = base_search_query(action, sub_action, input.filterOption) + query_dict.update(item_dict) + search_query = make_query(action, "bool", query_dict) + es.body.update(search_query) + + # ############ sort option ############ + sort_list = [{item.field: item.order} for item in input.sortOption] + es.set_sort(sort_list) + search_data = 
es.search(input.resultField) + + data_dict = search_count(es, item_dict, query_dict) + ckan_dict = ckan_query(input.searchOption) + + data_dict["overseaCount"] = search_count(es, {'filter': []}, ckan_dict)["overseaCount"] + except Exception: + except_name = get_exception_info() + result = {"result": 0, "errorMessage": except_name} + else: + search_list = [data["_source"] for data in search_data["hits"]["hits"]] + data_dict["searchList"] = search_list + result = {"result": 1, "errorMessage": "", "data": data_dict} + + return result diff --git a/API-SERVICE/ApiList/meta/getElsCkanList.py b/API-SERVICE/ApiList/meta/getElsCkanList.py new file mode 100644 index 00000000..4f29ede7 --- /dev/null +++ b/API-SERVICE/ApiList/meta/getElsCkanList.py @@ -0,0 +1,48 @@ +from typing import Dict +from datetime import datetime +from ELKSearch.Manager.manager import ElasticSearchManager +from ELKSearch.Utils.model import InputModel +from ELKSearch.Utils.elasticsearch_utils import make_query, base_search_query +from ELKSearch.Utils.database_utils import get_config +from Utils.CommonUtil import get_exception_info +from Utils.SearchUtil import search_count, ckan_query +from ApiService.ApiServiceConfig import config + + +def api(input: InputModel) -> Dict: + from_ = input.from_ - 1 + els_config = get_config(config.root_path, "config.ini")[config.db_type[:-3]] + index = "ckan_data" + try: + if input.chk and len(input.searchOption): + with open( + f"{config.root_path}/log/{config.category}/{datetime.today().strftime('%Y%m%d')}_search.log", + "a", + ) as fp: + for search in input.searchOption: + fp.write(f"{str(search.keywords)}\n") + + es = ElasticSearchManager(page=from_, size=input.size, index=index, **els_config) + es.set_sort(input.sortOption) + + ############ search option ############ + query_dict = ckan_query(input.searchOption) + search_query = make_query("query","bool", query_dict) + es.body.update(search_query) + + # ############ sort option ############ + sort_list = 
[{item.field: item.order} for item in input.sortOption] + es.set_sort(sort_list) + search_data = es.search(input.resultField) + + data_dict = search_count(es, {'filter': []}, query_dict) + + except Exception: + except_name = get_exception_info() + result = {"result": 0, "errorMessage": except_name} + else: + search_list = [data["_source"] for data in search_data["hits"]["hits"]] + data_dict["searchList"] = search_list + result = {"result": 1, "errorMessage": "", "data": data_dict} + + return result diff --git a/API-SERVICE/ApiList/meta/getPrefixBizMeta.py b/API-SERVICE/ApiList/meta/getPrefixBizMeta.py new file mode 100644 index 00000000..d6740ce8 --- /dev/null +++ b/API-SERVICE/ApiList/meta/getPrefixBizMeta.py @@ -0,0 +1,58 @@ +from typing import Dict +from fastapi.logger import logger +from pydantic import BaseModel +from ELKSearch.Manager.manager import ElasticSearchManager +from Utils.CommonUtil import get_exception_info +from ELKSearch.Utils.database_utils import get_config +from ApiService.ApiServiceConfig import config + + +class Prefix(BaseModel): + index: str + size: int + fields: list + query: str + + +def api(input: Prefix) -> Dict: + """ + Auto Complete data_nm + DB의 Like 검색과 유사함 + :param keyword: type dict, ex) {"data_name" : "테"} + :return: + """ + if not len(input.fields): + input.fields = ["data_nm"] + els_config = get_config(config.root_path,"config.ini")[config.db_type[:-3]] + try: + els_config["index"] = ["biz_meta","ckan_data"] + es = ElasticSearchManager(**els_config) + es.size = input.size + input.query = f"(*{input.query}*)" + del input.index + del input.size + search_query = {"query_string": input.dict()} + logger.info(search_query) + + body = { + "query": { + "bool": { + "must": [search_query] + } + } + } + es.body = body + logger.info(es.body) + prefix_data = es.search(input.fields) + logger.info(prefix_data) + + if not len(prefix_data): + return {"result": 1,"data": []} + except Exception: + except_name = get_exception_info() + result = 
{"result": 0, "errorMessage": except_name} + else: + prefix_data = [data["_source"]["data_nm"] for data in prefix_data["hits"]["hits"]] + result = {"result": 1, "errorMessage": "", "data": prefix_data} + + return result diff --git a/API-SERVICE/ApiList/meta/insertElsBizMeta.py b/API-SERVICE/ApiList/meta/insertElsBizMeta.py new file mode 100644 index 00000000..5f3a5716 --- /dev/null +++ b/API-SERVICE/ApiList/meta/insertElsBizMeta.py @@ -0,0 +1,55 @@ +import uuid +from typing import Dict +from Utils.CommonUtil import get_exception_info +from pydantic import BaseModel +from ELKSearch.Manager.manager import ElasticSearchManager + + +class BizMeta(BaseModel): + biz_dataset_id: str + src_url: str + kywrd: str + ctgry: str + data_updt_cyc: str + adm_dep: str + admr_nm: str + file_read_authority: str + retv_num: str + data_desc: str + data_prv_desk: str + license: str + lang: str + adm_dep_hp: str + data_nm: str + updt_nxt_dt: str + updt_dt: str + reg_dt: str + reg_user: str + amd_user: str + reg_date: str + amd_date: str + data_shap: str + data_srttn: str + data_limit: str + othr_use_notes: str + data_eng_nm: str + downl_num: str + attnt_data_num: str + share_num: str + contents: str + + +def api(biz_meta_data: BizMeta) -> Dict: + uid = uuid.uuid4() + try: + es = ElasticSearchManager() + biz_meta_data = biz_meta_data.dict() + biz_meta_data["biz_dataset_id"] = uid + es.insert(biz_meta_data, biz_meta_data["biz_dataset_id"]) + + except Exception: + except_name = get_exception_info() + result = {"result": 0, "errorMessage": except_name} + else: + result = {"result": 1, "errorMessage": ""} + return result diff --git a/API-SERVICE/ApiList/meta/metaInsert.py b/API-SERVICE/ApiList/meta/metaInsert.py new file mode 100644 index 00000000..6eb4a1a6 --- /dev/null +++ b/API-SERVICE/ApiList/meta/metaInsert.py @@ -0,0 +1,50 @@ +from typing import Dict +from ApiService.ApiServiceConfig import config +from Utils.CommonUtil import connect_db, get_exception_info, convert_data +import os 
+import base64 + + +def print_files_in_dir(root_dir, file_name): + files = os.listdir(root_dir) + print(len(files)) + for file in files: + path = os.path.join(root_dir, file, file_name) + print(path) + + +def api() -> Dict: + eda_path = "/Users/cbc/Downloads/EDA_FILE" + try: + db = connect_db() + files = os.listdir(eda_path) + id_cnt = 0 + for index, rid in enumerate(files): + print(index) + path = os.path.join(eda_path, rid, "profile_report_merged.html") + with open(path, "rb") as fd: + data = fd.read() + data_base64 = base64.b64encode(data).decode("ascii") + insert_data = f"data:text/html;base64,{data_base64}" + print(f"LEN : {len(insert_data)}") + # print(insert_data) + # query = f'UPDATE meta_temp SET file_data = {convert_data(insert_data)}\ + # WHERE gimi9_rid = {convert_data(rid)}' + select_query = f"select biz_dataset_id from meta_temp where gimi9_rid = {convert_data(rid)}" + select_res, _ = db.select(select_query) + if select_res: + biz_dataset_id = select_res[0]["biz_dataset_id"] + query = f"INSERT INTO tb_meta_html (biz_dataset_id, file_data) VALUES ({convert_data(biz_dataset_id)}, {convert_data(insert_data)});" + db.execute(query) + else: + id_cnt += 1 + print(f"id_cnt : {id_cnt}") + + except Exception: + except_name = get_exception_info() + result = {"result": 0, "errorMessage": except_name} + else: + + result = {"result": 1, "errorMessage": ""} + + return result diff --git a/API-SERVICE/ApiList/meta/updateCategory.py b/API-SERVICE/ApiList/meta/updateCategory.py new file mode 100644 index 00000000..73bbb3e2 --- /dev/null +++ b/API-SERVICE/ApiList/meta/updateCategory.py @@ -0,0 +1,27 @@ +import uuid +from ApiService.ApiServiceConfig import config +from Utils.CommonUtil import connect_db, get_exception_info, convert_data +from pydantic import BaseModel +from typing import Dict + + +class UpdateCategory(BaseModel): + node_id: str + node_nm: str + + +def api(update: UpdateCategory) -> Dict: + query = f"UPDATE tb_category\ + SET prnts_id = 
{convert_data(uuid.uuid4())},\ + node_id = {convert_data(update.node_id)},\ + node_nm = {convert_data(update.node_nm)}\ + WHERE node_id = {convert_data(update.node_id)};" + try: + db = connect_db() + db.execute(query) + except Exception: + except_name = get_exception_info() + result = {"result": 0, "errorMessage": except_name} + else: + result = {"result": 1, "errorMessage": ""} + return result diff --git a/API-SERVICE/ApiList/meta/updateElsBizMeta.py b/API-SERVICE/ApiList/meta/updateElsBizMeta.py new file mode 100644 index 00000000..293cb8f7 --- /dev/null +++ b/API-SERVICE/ApiList/meta/updateElsBizMeta.py @@ -0,0 +1,29 @@ +from typing import Dict +from pydantic import BaseModel +from Utils.CommonUtil import get_exception_info, connect_db, convert_data +from ELKSearch.Manager.manager import ElasticSearchManager +from ELKSearch.Utils.database_utils import get_config +from ELKSearch.Utils.elasticsearch_utils import data_process +from ApiService.ApiServiceConfig import config + + +class UpdateData(BaseModel): + biz_dataset_id: str + + +def api(input: UpdateData) -> Dict: + els_config = get_config(config.root_path,"config.ini")[config.db_type[:-3]] + query = f"SELECT * FROM v_biz_meta_info WHERE biz_dataset_id = {convert_data(input.biz_dataset_id)}" + try: + db = connect_db() + es = ElasticSearchManager(**els_config) + biz_data = db.select(query)[0][0] + + els_dict = data_process(biz_data)["_source"] + es.conn.index(index=es.index,body=els_dict,id=input.biz_dataset_id) + except Exception: + except_name = get_exception_info() + result = {"result": 0, "errorMessage": except_name} + else: + result = {"result": 1, "errorMessage": ""} + return result diff --git a/API-SERVICE/ApiList/meta/updateElsBizMetaBulk.py b/API-SERVICE/ApiList/meta/updateElsBizMetaBulk.py new file mode 100644 index 00000000..919f18df --- /dev/null +++ b/API-SERVICE/ApiList/meta/updateElsBizMetaBulk.py @@ -0,0 +1,34 @@ +from typing import Dict +from elasticsearch import helpers +from Utils.CommonUtil 
import get_exception_info, connect_db +from ELKSearch.Manager.manager import ElasticSearchManager +from ELKSearch.Utils.database_utils import get_config +from ELKSearch.Utils.elasticsearch_utils import data_process +from ApiService.ApiServiceConfig import config + + +def api() -> Dict: + """ + bulk로 업데이트 할 때 timeout이 발생하는 이슈가 있음 + """ + els_config = get_config(config.root_path,"config.ini")[config.db_type[:-3]] + # bulk_meta_item = list() + db_query = f"SELECT * FROM v_biz_meta_info WHERE status = 'D'" + + try: + db = connect_db() + es = ElasticSearchManager(**els_config) + + meta_wrap_list = db.select(db_query)[0] + for meta_wrap in meta_wrap_list: + els_dict = data_process(meta_wrap) + es.insert(els_dict["_source"],meta_wrap["biz_dataset_id"]) + # bulk_meta_item.append(els_dict) + # helpers.bulk(es.conn, bulk_meta_item, index=es.index) + + except Exception: + except_name = get_exception_info() + result = {"result": 0, "errorMessage": except_name} + else: + result = {"result": 1, "errorMessage": ""} + return result diff --git a/API-SERVICE/ApiList/sitemng/__init__.py b/API-SERVICE/ApiList/sitemng/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/API-SERVICE/ApiList/sitemng/getCodeInfo.py b/API-SERVICE/ApiList/sitemng/getCodeInfo.py new file mode 100644 index 00000000..b7d0e961 --- /dev/null +++ b/API-SERVICE/ApiList/sitemng/getCodeInfo.py @@ -0,0 +1,32 @@ +from typing import Dict +from ApiService.ApiServiceConfig import config +from Utils.CommonUtil import connect_db, get_exception_info, convert_data + + +def api(groupId) -> Dict: + get_code_info_query = f"SELECT code_id, code_nm, data_1, data_2 \ + FROM tb_code_detail \ + WHERE code_group_id = {convert_data(groupId)};" + try: + db = connect_db() + code_list = db.select(get_code_info_query) + except Exception: + except_name = get_exception_info() + result = {"result": 0, "errorMessage": except_name} + else: + code_info = [] + if len(code_list[0]): + code_info = [ + { + "code_id": 
code_detail["code_id"], + "code_nm": code_detail["code_nm"], + "data_1": code_detail["data_1"], + "data_2": code_detail["data_2"], + } + for code_detail in code_list[0] + ] + + body = {"list": code_info} + result = {"result": 1, "errorMessage": "", "data": body} + + return result diff --git a/API-SERVICE/ApiList/sitemng/getCodeList.py b/API-SERVICE/ApiList/sitemng/getCodeList.py new file mode 100644 index 00000000..74a01ff3 --- /dev/null +++ b/API-SERVICE/ApiList/sitemng/getCodeList.py @@ -0,0 +1,52 @@ +from typing import Dict +from ApiService.ApiServiceConfig import config +from Utils.CommonUtil import connect_db, get_exception_info, convert_data +from fastapi.logger import logger + + +def api(perPage: int, curPage: int, gropId: str, keyword: str = "") -> Dict: + + curPage = curPage - 1 + total_cnt_query = "SELECT count(*) AS cnt FROM tb_code_detail" + code_list_query = ( + "SELECT *, row_number () OVER (ORDER BY {0}) AS rowNo FROM tb_code_detail" + ) + + try: + db = connect_db() + common_condition = f" WHERE code_group_id = {convert_data(gropId)}" + code_list_query = code_list_query + common_condition + total_cnt_query = total_cnt_query + common_condition + + if len(keyword): + # keyword 검색 조건 추가 + order_condition = f"code_nm SIMILAR to '%{keyword}%' DESC" + search_condition = f"AND code_nm LIKE '%{keyword}%'" + + code_list_query = code_list_query + search_condition + total_cnt_query = total_cnt_query + search_condition + code_list_query = code_list_query.format(order_condition) + else: + order_condition = "reg_date ASC" + code_list_query = code_list_query.format(order_condition) + + paging_condition = f" LIMIT {perPage} OFFSET ({perPage} * {curPage})" + code_list_query = code_list_query + paging_condition + + code_list = db.select(code_list_query) + total_cnt = db.select(total_cnt_query) + + except Exception: + except_name = get_exception_info() + result = {"result": 0, "errorMessage": except_name} + else: + code_info = [] + if len(code_list[0]): + code_info = [ 
+ {"code_id": code_detail["code_id"], "code_nm": code_detail["code_nm"]} + for code_detail in code_list[0] + ] + + body = {"totalcount": total_cnt[0][0]["cnt"], "list": code_info} + result = {"result": 1, "errorMessage": "", "data": body} + return result diff --git a/API-SERVICE/ApiService/ApiService.py b/API-SERVICE/ApiService/ApiService.py new file mode 100644 index 00000000..a853b89f --- /dev/null +++ b/API-SERVICE/ApiService/ApiService.py @@ -0,0 +1,27 @@ +import importlib.util +from fastapi.logger import logger +from fastapi import APIRouter +from ApiService.ApiServiceConfig import config + + +class ApiService: + def __init__(self) -> None: + self.router = APIRouter() + self.set_route() + + def set_route(self) -> None: + for api_name, api_info in config.api_config.items(): + if config.category == api_info["sub_dir"]: + module_path = ( + f'{config.root_path}/ApiList/{api_info["sub_dir"]}/{api_name}.py' + ) + module_name = "api" + spec = importlib.util.spec_from_file_location(module_name, module_path) + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + self.router.add_api_route( + f'{api_info["url"]}', + module.api, + methods=[api_info["method"]], + tags=[f'service [ {api_info["sub_dir"]} ]'], + ) diff --git a/API-SERVICE/ApiService/ApiServiceConfig.py b/API-SERVICE/ApiService/ApiServiceConfig.py new file mode 100644 index 00000000..65a2601c --- /dev/null +++ b/API-SERVICE/ApiService/ApiServiceConfig.py @@ -0,0 +1,31 @@ +from typing import Dict +from psycopg2 import pool +from passlib.context import CryptContext + + +class ApiServiceConfig: + root_path: str + + category: str + + db_type: str + db_info: Dict + + remote_info: Dict + + server_host: str + server_port: int + + api_config: Dict + + secret_info: Dict + user_info: Dict + pwd_context: CryptContext + email_auth: Dict + + conn_pool: pool.SimpleConnectionPool + + keycloak_info: Dict + + +config = ApiServiceConfig diff --git a/API-SERVICE/ApiService/__init__.py 
b/API-SERVICE/ApiService/__init__.py new file mode 100644 index 00000000..9213060d --- /dev/null +++ b/API-SERVICE/ApiService/__init__.py @@ -0,0 +1,2 @@ +from .ApiService import * +from .ApiServiceConfig import * diff --git a/API-SERVICE/ConnectManager/PostgresManager.py b/API-SERVICE/ConnectManager/PostgresManager.py new file mode 100644 index 00000000..3c2154b4 --- /dev/null +++ b/API-SERVICE/ConnectManager/PostgresManager.py @@ -0,0 +1,54 @@ +import psycopg2 +from typing import List, Dict, Tuple, Any +from ApiService.ApiServiceConfig import config +from fastapi.logger import logger + + +class PostgresManager: + def __init__(self) -> None: + self.conn = self.connect() + self.cursor = self.conn.cursor() + + def connect(self): + conn = config.conn_pool.getconn() + + logger.info("PostgresManager Connect.") + return conn + + def execute(self, sql: str) -> None: + self.cursor.execute(sql) + self.conn.commit() + logger.info(f"PostgresManager Execute Result. ({sql})") + + def multiple_excute(self, sql_list: list) -> None: + try: + for index, sql in enumerate(sql_list): + logger.info(f"PostgresManager Multiple Execute. ({index}. {sql})") + self.cursor.execute(sql) + self.conn.commit() + except (Exception, psycopg2.DatabaseError): + self.conn.rollback() + raise psycopg2.DatabaseError + + def select( + self, sql: str, count: int = None + ) -> Tuple[List[Dict[Any, Any]], List[Any]]: + self.execute(sql) + column_names = [desc[0] for desc in self.cursor.description] + if count is None: + rows = self.cursor.fetchall() + else: + rows = self.cursor.fetchmany(count) + # logger.info(f'PostgresManager Select Execute. 
({sql})') + + result = [] + for row in rows: + result.append(dict(zip(column_names, row))) + return result, column_names + + def commit(self): + self.conn.commit() + + def __del__(self) -> None: + self.cursor.close() + config.conn_pool.putconn(self.conn) diff --git a/API-SERVICE/ConnectManager/__init__.py b/API-SERVICE/ConnectManager/__init__.py new file mode 100644 index 00000000..b5c0391e --- /dev/null +++ b/API-SERVICE/ConnectManager/__init__.py @@ -0,0 +1 @@ +from .PostgresManager import * diff --git a/API-SERVICE/ELKSearch/.gitignore b/API-SERVICE/ELKSearch/.gitignore new file mode 100644 index 00000000..fd1b0921 --- /dev/null +++ b/API-SERVICE/ELKSearch/.gitignore @@ -0,0 +1,2 @@ +/.idea/ +*.iml \ No newline at end of file diff --git a/API-SERVICE/ELKSearch/Manager/__init__.py b/API-SERVICE/ELKSearch/Manager/__init__.py new file mode 100644 index 00000000..b6e690fd --- /dev/null +++ b/API-SERVICE/ELKSearch/Manager/__init__.py @@ -0,0 +1 @@ +from . import * diff --git a/API-SERVICE/ELKSearch/Manager/manager.py b/API-SERVICE/ELKSearch/Manager/manager.py new file mode 100644 index 00000000..b0cd39ad --- /dev/null +++ b/API-SERVICE/ELKSearch/Manager/manager.py @@ -0,0 +1,79 @@ +from typing import Dict, Any, Union +from elasticsearch import Elasticsearch +from ELKSearch.Utils.elasticsearch_utils import make_query + + +class ElasticSearchManager: + def __init__( + self, + host: str = "192.168.101.44", + port: str = "39200", + page: int = 0, + size: int = 10, + index: str = "biz_meta", + ): + """ + set elasticsearch connect && DSL query setting function + :param host: elasticsearch host ip addr, default = localhost + :param port: elasticsearch ip port number, default = 9200 + :param index: + :param page: page, size * page , elasticsearch default value = 0 + :param size: 아이템 개수 , elasticsearch default value = 10 + """ + self.host = host + self.port = port + self.size = size + self.index = index + self.cur_from = size * page + self.conn = self.connect() + self.body = 
self.set_default_option() + + def connect(self) -> Elasticsearch: + es = Elasticsearch(f"http://{self.host}:{self.port}", timeout=30, max_retries=10, retry_on_timeout=True) + return es + + def set_default_option(self) -> Dict[Any, Any]: + # 유지 보수를 위해 model 적용 안 함 + self.body = { + "sort": [], + } + return self.body + + def set_sort(self, sort: list) -> None: + self.body["sort"] = sort + + def set_pagination(self, size: int, from_: int) -> None: + self.size = size + self.cur_from = size * from_ + + def search(self, source=...): + return self.conn.search( + index=self.index, + body=self.body, + from_=self.cur_from, + size=self.size, + _source=source, + ) + + def insert(self, body: dict, doc_id: str) -> None: + return self.conn.index(index=self.index, body=body, id=doc_id) + + def update(self, body: dict, doc_id: str): + return self.conn.update(index=self.index, id=doc_id, body=body) + + def delete(self, field: str, data: Union[str, list]): + """ + 단수 : { query: { term: _id}} + 복수 : { query : { term : []}} + :param field: data type str, elasticsearch index _source name + :param data: data type str or list + """ + delete_data = {field: data} + delete_command = make_query("query", "term", delete_data) + return self.conn.delete_by_query(index=self.index, body=delete_command) + + def prefix(self, keyword: dict, source=...): + prefix_query = make_query("query", "prefix", keyword) + return self.conn.search( + index=self.index, body=prefix_query, size=self.size, _source=source + ) diff --git a/API-SERVICE/ELKSearch/README.md b/API-SERVICE/ELKSearch/README.md new file mode 100644 index 00000000..15643c0c --- /dev/null +++ b/API-SERVICE/ELKSearch/README.md @@ -0,0 +1,2 @@ +# pyes +python elasticsearch controller diff --git a/API-SERVICE/ELKSearch/Utils/__init__.py b/API-SERVICE/ELKSearch/Utils/__init__.py new file mode 100644 index 00000000..b6e690fd --- /dev/null +++ b/API-SERVICE/ELKSearch/Utils/__init__.py @@ -0,0 +1 @@ +from . 
import * diff --git a/API-SERVICE/ELKSearch/Utils/database_utils.py b/API-SERVICE/ELKSearch/Utils/database_utils.py new file mode 100644 index 00000000..f19188d9 --- /dev/null +++ b/API-SERVICE/ELKSearch/Utils/database_utils.py @@ -0,0 +1,105 @@ +import argparse +import configparser +from psycopg2 import pool +from typing import List, Dict, Tuple, Any +from ELKSearch.Manager.manager import ElasticSearchManager + + +class ElsSearchConfig: + root_path: str + category: str + + db_type: str + db_info: Dict + + els_type: str + els_info: Dict + check: bool + + conn_pool: pool.SimpleConnectionPool + es: ElasticSearchManager + + +config = ElsSearchConfig + + +def get_config(root_path, config_name: str): + ano_cfg = {} + + conf = configparser.ConfigParser() + config_path = root_path + f"/ELKSearch/conf/{config_name}" + conf.read(config_path, encoding="utf-8") + for section in conf.sections(): + ano_cfg[section] = {} + for option in conf.options(section): + ano_cfg[section][option] = conf.get(section, option) + + return ano_cfg + + +def parser_params() -> Any: + parser = argparse.ArgumentParser() + parser.add_argument("--category", default="local") + parser.add_argument("--db_type", default="local") + parser.add_argument("--check", default="True") + + return parser.parse_args() + + +def prepare_config(root_path) -> None: + args = parser_params() + config.root_path = root_path + config.category = args.category + + db_config = get_config(root_path, "db_config.ini") + els_config = get_config(root_path, "config.ini") + + config.els_type = args.category + config.els_info = els_config[args.category] + if config.category != "email": + config.es = ElasticSearchManager(**config.els_info) + config.check = args.check + + config.db_type = f"{args.db_type}_db" + config.db_info = db_config[config.db_type] + config.conn_pool = make_connection_pool(config.db_info) + + +def make_connection_pool(db_info): + conn_pool = pool.SimpleConnectionPool( + 1, + 20, + user=db_info["user"], + 
password=db_info["password"], + host=db_info["host"], + port=db_info["port"], + database=db_info["database"], + options=f'-c search_path={db_info["schema"]}', + connect_timeout=10, + ) + return conn_pool + + +def connect_db(): + conn = config.conn_pool.getconn() + return conn + + +def execute(conn, cursor, sql) -> None: + cursor.execute(sql) + conn.commit() + + +def select(conn, sql: str, count: int = None) -> Tuple[List[Dict[Any, Any]], List[Any]]: + cursor = conn.cursor() + execute(conn, cursor, sql) + column_names = [desc[0] for desc in cursor.description] + if count is None: + rows = cursor.fetchall() + else: + rows = cursor.fetchmany(count) + + result = [] + for row in rows: + result.append(dict(zip(column_names, row))) + return result, column_names diff --git a/API-SERVICE/ELKSearch/Utils/elasticsearch_utils.py b/API-SERVICE/ELKSearch/Utils/elasticsearch_utils.py new file mode 100644 index 00000000..ec82df1a --- /dev/null +++ b/API-SERVICE/ELKSearch/Utils/elasticsearch_utils.py @@ -0,0 +1,66 @@ +import re +from typing import Dict, Any +from datetime import datetime + + +def is_space(text: str) -> int: + if " " in text: + result = 1 + else: + result = 0 + return result + + +def make_query(operator, field, value) -> Dict[Any, Any]: + query = {operator: {field: value}} + return query + + +def base_search_query(action: str, sub_action: str, item_list: list) -> Dict: + item_dict = {sub_action: []} + + for item in item_list: + if len(item.keywords): + words = " ".join(item.keywords).strip() + + # field div + if 1 < len(item.field): + key = "multi_match" + detail = { + "fields": item.field, + "operator": item.operator, + "type": "phrase_prefix", + } + query = make_query(key, action, words) + query[key].update(detail) + else: + key = "match" + detail = {action: words, "operator": item.operator} + query = make_query(key, item.field[0], detail) + # query 추가 + item_dict[sub_action].append(query) + else: + continue + return item_dict + + +def default_process(els_dict, 
data): + els_dict["_id"] = data["biz_dataset_id"] + els_dict["_source"] = data + els_dict["_source"]["biz_dataset_id"] = data["biz_dataset_id"] + return els_dict + + +def data_process(data): + # D-Ocean Project Function + els_dict = dict() + data["re_ctgry"] = re.sub("[ ]", "", str(data["ctgry"])) + data["re_data_shap"] = re.sub("[ ]", "", str(data["data_shap"])) + data["re_data_prv_desk"] = re.sub("[ ]", "", str(data["data_prv_desk"])) + + # test 환경에서 updt_dt가 None값인 경우가 있음 + if "updt_dt" in data.keys() and data["updt_dt"] and len(data["updt_dt"]) > 25: + data["updt_dt"] = datetime.strptime(data["updt_dt"], "%Y-%m-%d %H:%M:%S.%f") + + els_dict = default_process(els_dict, data) + return els_dict diff --git a/API-SERVICE/ELKSearch/Utils/model.py b/API-SERVICE/ELKSearch/Utils/model.py new file mode 100644 index 00000000..9a79ced4 --- /dev/null +++ b/API-SERVICE/ELKSearch/Utils/model.py @@ -0,0 +1,24 @@ +from pydantic import BaseModel, Field +from typing import List, Union + + +class ConfigOption(BaseModel): + field: Union[list, str] + keywords: list + operator: str + + +class SortOption(BaseModel): + field: str + order: str + + +class InputModel(BaseModel): + chk: bool = False + index: str = "biz_meta" + from_: int = Field(1, alias="from") + size: int = 10 + resultField: list = [] + sortOption: List[SortOption] = [] + searchOption: List[ConfigOption] = [] + filterOption: List[ConfigOption] = [] diff --git a/API-SERVICE/ELKSearch/__init__.py b/API-SERVICE/ELKSearch/__init__.py new file mode 100644 index 00000000..781be988 --- /dev/null +++ b/API-SERVICE/ELKSearch/__init__.py @@ -0,0 +1,2 @@ +from .Manager import * +from .Utils import * diff --git a/API-SERVICE/ELKSearch/conf/bad_word.txt b/API-SERVICE/ELKSearch/conf/bad_word.txt new file mode 100644 index 00000000..8ff9da46 --- /dev/null +++ b/API-SERVICE/ELKSearch/conf/bad_word.txt @@ -0,0 +1,1532 @@ +넌씨눈 +개새끼 +따먹었어 +ㄷㅇㅂ +시펄 +빠구울 +쓰레기 새끼 +된장녀 +지껄이 +브랄 +십팔넘 +개씁년 +썅놈 +병크 +씨블 +졸좋 +새뀌 +찌랄 +애미랄 +니씨브랄 +죽어버려 +개너미 +zaji +줘패 
+버지뜨더 +쫀 맛 +창넘 +늬미 +개지랄 +니미기 +쇡끼 +닝기리 +ㅈ.ㄴ +버지물마셔 +자지넣자 +보지자지 +bozi +인간말종 +존잼 +씌벨 +존똑 +조오웃 +개쓰레기 +씨이붕 +샹년 +대갈 +십8 +똥구녁 +me췬 +쳐쑤셔박어 +슈우벌 +씨새발끼 +개후라들놈 +좆만한놈 +호냥년 +18ㅅㅔ키 +뒤졌 +개똥 +로 꺼.져 +띠불 +도랏 +성폭행 +ㅈ같네 +젖같 +족까 +젖까 +이기야 +씹새 +우미쑤셔 +조온만 +씨섹끼 +미핀놈 +한녀 +미튄 +똥 +씨벨 +싸가지없 +시팔년 +왕버지 +빠가니 +ㅈ1랄 +허졉 +족까내 +게에가튼 +샛기 +띠블넘 +누나강간 +씨바알 +개같 +존나아 +노무노무 +미친구멍 +그1켬 +뻐큐 +개샛기 +뼝신 +좋오웃 +씹창 +씨입새에 +허접 +G랄 +촌씨브라리 +개소리 +보지따먹기 +빨치산 +ㅄ +시이붕 +보지녀 +허벌자식 +샊히 +씹탱 +슈1발 +너거애비 +좃넘 +조오지 +씨블년 +새키 +사까시 +걸레년 +애애무 +엠븽신 +좃대가리 +병신세리 +십지랄 +쳐- +애미 +sex해 +ㅎㅃ +시바앙 +빠가새 +내조지 +병신 +십자슥 +공지 +뒤지길 +18세ㅋㅣ +똘아이 +shit +닳은년 +젓떠 +존쎼 +빻은 +눈깔파 +오랄 +졏같 +졸귀 +존나게 +싸물어 +처먹 +벌창 +사까쉬 +십자석 +니뿡 +이새끼 +보라니 +손놈 +뒤져요 +좃까리 +짱개 +후1빨 +ㅅㄲ들 +정액마셔 +에미 +시이풀 +쉬불 +씨뻘 +조개따조? +ㅂ크 +웅앵 +내버지 +십부랄 +로린 +개에걸래 +유우우방것 +새킈 +니미럴 +꼴랑 +버지쑤셔 +쉰내 +보짓물 +쌍눔 +지1뢰 +애무 +씨비 +쓰바 +시밸 +돌앗구만 +쓰래기같 +개쉐뀌 +돈년 +존트 +쓰발 +새끼라 +새1끼 +불알 +니믜 +존귘 +빠구리 +처먹고 +성괴 +친 ㅅㄲ +씨입새 +클리토리스 +친 놈 +느금마 +시방색희 +레1친 +바주카자지 +개쩌 +개.웃 +보전깨 +보지벌리 +쌍놈 +좃만이 +빠라 +미치ㄴ +럼들 +족같내 +존쎄 +ㅅㅐㄲㅣ +십팔새끼 +띠이발 +아오 ㅅㅂ +개같이 +꼴보기 +뒤져야 +꺼져요 +거지같은 +색희 +좇 +지뢀 +새끼 +미친~ +은년 +보지뚫어 +씨댕 +ㅈ리 +뒈져 +조온나 +씹덕 +젓물냄새 +망해라 +성교 +버어어지이 +미: 놈 +삼일한 +ㅈㄴ +똥꾸뇽 +노네들 +가슴주물럭 +개거얼레 +존예 +엿이나 +쎄리 +존내 +좃빠라라 +남미새 +친 년 +뒤질 +귀두 +헐렁보지 +돌앗나 +개독 +좁밥 +난자마셔 +창놈 +꺼지세요 +착짱죽짱 +유방쪼물딱 +달달이 +세키 +보지보지 +유방주물럭 +좃간년 +봉알 +가슴빨아 +보지빨어 +덜은새끼 +십세 +ㅆㅣ바 +미놈 +돈새끼 +시이팔 +딴년 +bitch +씨부렬 +18num +로꺼져 +섬숭이 +보지벌려 +잡것 +젖 같 +호모 +후장꽂아 +닥치세 +시벌 +노무현 +애미보지 +애미자지 +ㅅ1발 +애에미 +보지정액 +염뵹 +닥1 +돌은넘 +ㅆㅣㅂㅏ +설거지론 +쌔리 +엠창 +붕신 +자지구멍 +지뢰 +절라 +좋만 +ㅅ.ㅂ +퐁퐁남 +쎄끼 +시입세에 +쉬버 +내꺼핧아 +극혐 +18놈 +시이펄 +ㄱㅐㅅㅐㄲl +에에무 +허벌보지 +보적보 +시부럴 +상폐녀 +쓉새 +십탱구리 +쉬빡 +후우자앙 +조센징 +쉬이이 +혀로보지핧기 +씹쌔 +지1랄 +버지뚫어 +또라인 +니애뷔 +스벌 +개좆 +쌍년 +젓물 +나쁜새끼 +씹할 +시팔넘 +뒤진다 +한 년 +여자따묵기 +자기핧아 +ㅈ같 +사새끼 +지이랄 +덬 +적까 +개붕알 +개자지 +쉬붕 +시-발 +ㅆ1ㄺ +죵나 +좆년 +개애거얼래 +씨팍 +친노마 +후려 +허덥 +엠-창 +개떡 +가슴핧아 +십셰리 +구씹 +씹자지 +곱창났 +빠네 +디졌 +D쥐고 +씨바 +뚫린입 +조가튼 +걸레보지 +쪽1바리 +병-신 +병딱 +시이불 +따먹자 +ㅌㅓㄹㅐㄱㅣ +보지털 +막간년 +개씨발 +실프 +좃만한것 +십세리 +미친쉐이 +띠이이벌 +또오라아이 +개늠 +니뽕 +쓰레기새 +보지뜨더 +찍찍이 +씨불알 +쌍쌍보지 +젓까 +뻑유 +싑창 +씨밸 +ㅉ질한 +시팔놈 +취좃 +조오온니 +강간 +미친씨부랄 +유방쪼물럭 +새ㄲㅣ +잠지물마셔 +빙신 +걔잡지랄 +좀마니 +미:놈 +괘새끼 +돌은새끼 +조개마셔줘 +암캐년 +괴에가튼? 
+뚝배기 +색퀴 +좆새끼 +쉬이붕 +허젚 +조개벌려조 +뒷치기 +빠가냐 +운디네 +쪼녜 +자지 +존싫 +쉬박 +병맛 +시새발끼 +쌕스 +쥰트 +조개핧아줘? +씹미랄 +후라덜 +조까 +시탱 +엠빙신 +어미강간 +시빡 +꼴값 +십탱굴이 +슈ㅣ발 +ㅆㅂ +친놈 +졸웃 +좆만아 +십새 +걔섀 +호로자 +씨버럼 +어미쑤시자 +쒸8 +18ㅅㅔㅋㅣ +젓밥 +호로자슥 +좃물 +여어엄 +버따리자지 +늬믜 +펨코 +촌씨브랭이 +이따위 +느그매 +머리텅 +d쥐고 +보지털뽑아 +세엑스 +젖같은 +게지랄놈 +새1키 +뻨큐 +삐걱 +🚬 +개놈 +왕털보지 +썌끼 +입 털 +쿰.척 +시발새끼 +색끼 +년놈 +영자 +늑음 +미시친발 +개걸레 +가슴쪼물딱 +등신 +써글년 +씨뎅 +맛이간년 +쌍넘 +씨입세에 +애에비 +좃도 +버어지 +개넷 +시입세 +좃까 +개젓가튼넘 +미친쇄리 +주글년 +조개보지 +죽여불고 +개후라새끼 +죶 +씹물 +개간 +씹쌔끼 +항문쑤셔 +조개쑤셔줘 +쓰파 +섹쓰 +막대쑤셔줘? +씨벌년 +개 새끼 +ㅈㅏ위 +띠벌 +쉬밸년 +보지머리박기 +에에미 +존ㄴ나 +퐁퐁녀 +또-라-이 +죤내 +정신나갓 +시이벌 +허버리년 +드응신 +빠아구우리 +쉬팔 +쉬이팔 +jonna +게이 +시불 +버지벌료 +노알라 +상년 +좆나 +잡년 +따아알따아리 +슈벌 +뇌1텅 +새.끼 +그켬 +졸잼 +맛간년 +보슬아치 +개아기 +보지구녕 +거지같 +빠간가 +트랜스젠더 +대에가리 +글러먹 +첫빠 +빙신쉐이 +게젓 +쓰1레기 +씝창 +시팔 +좆빨아 +닥-쳐 +듣보 +떠라이 +me친 +씨부럴 +ㅅ1ㄲ +18세키 +시팔새끼 +존니 +십부럴 +잠지뚫어 +ㄱㅅㄲ +흐젚 +버어지이 +같은 새끼 +씹선 +믜친 +좆까 +씨박색히 +ㅆㅂㄹㅁ +스ㄹㅜ +애미잡년 +미친개 +졀리 +싸가지 없 +찌질 +병1신 +썅늠 +항문 +시방쉑희 +개떵 +jaji +존낙 +난자먹어 +개애걸래 +흐접 +좆같은새끼 +존버 +미치인 +보지핧아줄까 +외1퀴 +슨상님 +보징어 +공지사항 +띠블 +자지빨아 +허벌자지 +쥰나 +보지쥐어짜 +레친 +미친놈 +에무 +자지털 +버어어지 +수셔 +먹.끔 +에라이 퉤 +레기같 +유두빨어 +아아가리 +개씨블 +다꺼져 +쳐받는 +따알따리 +허어벌 +이그니스 +유우까압 +쉬이풀 +대애가리 +꼬추 +자지정개 +개작두년 +쫂 +조오우까튼 +미틴놈 +개씨발넘 +개씁자지 +도라이 +D지고 +버지따먹기 +쉑갸 +자지핧아줘 +쪽본 +조-ㅈ +쿰척 +조오까튼 +18새끼 +미티넘 +봊 +씹새끼 +개에거얼래 +젼나 +pennis +쳐발라 +보지핧아줘 +십창녀 +여엄병 +좆까라 +좃마무리 +18ㅅㅐㄲㅣ +스.루 +옘병 +페니스 +미틴년 +엠플레버 +미틴넘 +자박꼼 +시미발친 +호로잡 +막대쑤셔줘 +자지꽂아 +띠발뇬 +뻑큐 +쉽세 +주둥이 +에라이퉷 +jot같 +여미새 +d져 +고환 +내꺼빨아 +버짓물 +개부달 +걔잡년 +미친색 +창녀버지 +좆도 +졸귘 +지랄 +병닥 +젖탱이 +ㅆ1ㅂ +좃물냄새 +사까시이 +씨빠빠 +까내리 +정액먹어 +조개넓은년 +엠생 +버지벌려 +섹스하자 +병신셰리 +띠부우울 +씨박색희 +자지뜨더 +젓냄새 +씨이벌 +음경 +개후라년 +뇌 텅 +조옴마니 +염병 +앙기모띠 +개색뀌 +씨팍세끼 +어미따먹자 +기자레기 +자압것 +씹탱이 +씨발 +찌질이 +젖밥 +눈나 +젼낰 +십쉐끼 +젓마무리 +개에가튼 +엿먹어라 +그나물에 +미쳤니 +ㅆㅣ발 +개자식 +ㅆㅣ댕 +찎찎이 +씹자슥 +소음순 +지롤 +시바알 +씨입 +ㅁ친 +개지랄놈 +쉬펄 +씨뷰렬 +니애비 +내미럴 +ㅁㅣ췬 +penis +김치녀 +ㅅㅡ루 +친년 +ㅂㅊ +닥2 +빠큐 +보지에자지너 +씨걸 +왕털잠지 +정자핧아 +호로 +돌았네 +띠벨 +졸맛 +띠이벌 +조낸 +ㅆㅂㄻ +잠짓물마셔 +쌔끼 +개저가튼 +졸멋 +씨벌쉐이 +씨퐁뇬 +개념빠가 +띠빌 +빠굴 +따먹는다 +맘충 +젓만이 +서버 +쉬방 +씌댕 +돌았구만 +시벌탱 +왕털버지 +롬들 +파친 +븅신 +및힌 +그지 같 +존잘 +보지틀래기 +씨빨 +씹년 +개작두넘 +개나대 +뽀지 +쥰니 +보지물 +조개속물 +조개핧아줘 +애미좃물 +드으응신 +부왘 +내자지 +펑글 +유방핧어 
+졀라 +잠지털 +후장뚫어 +좀쓰레기 +야dong +섀키 +앰창 +걸-레 +fuck +흐졉 +가슴쪼물락 +게세끼 +쓰바새끼 +ㅆㅣ8 +취ㅈ +씨퐁자지 +곱창나 +자지구녕 +개새기 +ㅆㄺ +새꺄 +씹세 +졸예 +꼭지 +조온니 +디져라 +띠이버얼 +씨븡 +큰보지 +개잡년 +쓰벌 +망돌 +그지같 +버지냄새 +젓가튼 +18년 +지이라알 +왜저럼 +쉐끼 +존1 +꼴깝 +가슴조물락 +개가튼뇬 +개저엇 +양아치 +조오또 +먹.금 +개넘 +돌으년 +외퀴 +니할애비 +빠가씹새 +괴가튼 +씨팔 +존나 +명존 +이 새끼 +먹끔 +엠뷩신 +조옷만 +쓰댕 +개가튼 +호로자식 +ㅇㅍㅊㅌ +따아알따리 +보지찌져 +덜떨어 +십녀 +씨이팔 +뒷잇치기 +d지고 +띠이바알 +계새끼 +ㅅ.ㄲ +잡놈 +더어엉신 +젗같 +씹보지 +개부랄 +조온 +버지털 +자지쓰레기 +yadong +꼴갑 +wlfkf +뚫린 입 +씹지랄 +조온마니 +뇌텅 +개고치 +더럽네 +시발년 +nflavor +왕자지 +띠브울 +좃가튼뇬 +개라슥 +염병할 +뒤치기 +여자ㄸㅏ묵기 +존 나 +니아비 +씹브랄 +기레기 +18nom +따먹을까 +구1씹 +오크 +배빵 +김대중 +버지썰어 +조우까튼 +개같은년 +색갸 +정자마셔 +화낭년 +발놈 +쥰내 +시박색히 +가슴만져 +뒤져라 +니아범 +보지핧아 +자지핧아 +닝기미 +시발 +조개벌려조? +개가튼년 +쥐랄 +죠낸 +세끼 +시이발 +졏 같 +쉬팍 +쉬이발 +운영자 +소추 +보지벌리자 +조개마셔줘? +mi친 +쉬이벌 +개보지년 +쪽바리 +강간한다 +dogbaby +미쳣네 +ㅂㅅ +죽여 버리고 +느금 +헤으응 +유방만져 +띠팔 +띠바 +요년 +염-병 +보지핧어 +촌씨브랑이 +굿보지 +도른 +미칭럼 +시1발 +존귀 +씨퐁 +유두핧어 +흉자 +따먹어야지 +대-가-리 +쪼다 +좃보지 +씌발 +뻐규 +좃냄새 +노친네 +씨바라 +미쳤나 +껒여 +미칀 +씹쉐뀌 +허벌년 +믜칀 +쇅끼 +쉬이이이 +씨1발 +시바시바 +쌍보지 +돌앗네 +동생강간 +쉬빨 +pussy +개마이 +개셈 +개년 +좀물 +머리 텅 +맛없는년 +동성애자 +십탱 +좆 +색키 +새77ㅣ +씹탱굴이 +보지구멍 +뷰웅신 +쒸발 +정액발사 +쎅쓰 +보지털어 +유방핧아 +정액짜 +가슴빨어 +개젓 +씨벌 +ㅅ발 +ㅅ루 +조깟 +쉽알넘 +짬지 +텐덕 +십팔 +씨8 +니년 +개잡지랄 +보지 +미1친 +사까아시 +씨팍새끼 +닥전 +보쥐 +젓대가리 +쪼다새끼 +ㅂㄹ +세엑쓰 +씨이발 +씨펄 +게부럴 +병신씨발 +보픈카 +씹팔넘 +미친구녕 +쯰질 +허좁 +미-친 +친구년 +쬰잘 +쬲 +띠부울 +씨븡새끼 +뼈큐 +닥쳐라 +좆만한새끼 +뇌-텅텅 +좃빠구리 +후라덜넘 +보지찢어 +씨댕이 +썅 +십세이 +미치누 +레기네 +존좋 +개걸래 +벌창같은년 +쪼까튼 +별창 +쒸댕 +조개쑤셔줘? 
+좆물 +찌1질 +종나 +거시기 +좋만한것 +빻았 +섹끼 +유방 +jazi +지-랄 +먹1금 +싹스 +자지빨어 +시바 +눈새 +씨발년 +지럴 +줬같은 +친ㅅㄲ +씨ㅂㅏ +노옴 +싸개 +좆먹어 +sibal +따먹어 +니미 +가슴핧어 +좆밥 +조오올라 +씹창녀 +젓같내 +조녜 +쉑쓰 +씌팔 +ㅅㄲ네 +로 꺼져 +쓰브랄쉽세 +mi쳤 +졸싫 +씹못 +쓰벨 +등-신 +펨베 +짱깨 +쌍-판 +🖕 +뇌피셜 +존마니 +좃만아 +쎄엑스 +나빼썅 +와꾸 +십떼끼 +게새끼 +닥후 +시미친발 +ㅆㄹㄱ +시박쉑히 +좃깟네 +씨발병신 +찝째끼 +시파 +핑끄 +자지핧어 +개보지 +ya동 +호로짜식 +띠이이발 +씨불 +버짓물마셔 +뽄새 +ㅁㅊ +꺼.지 +디지고 +빠굴이 +슈발 +씹 +대가리 +엿같 +개-새-끼 +boji +게늠 +졌같은 +좆같은놈 +개후라 +후장뚫어18세키 +띠펄 +십쉐 +엔플레버 +좃빠네 +버지빨어 +조오가튼 +색히 +쉬탱 +머갈 +미친ㅋ +존.나 +쌕쓰 +개씨발자슥 +붕알 +한년 +凸 +부랄 +섹스해 +에애무 +쳐먹 +닥쳐 +누보햄 +점물 +씨팍넘 +조개따조 +뒤지겠 +좃털 +게저엇 +쓰루 +뽄세 +ㅅㅍ +죽여뿌고 +ㅅ끼 +ㅉ +씹버지 +따먹었지 +게자식 +골빈 +써글 +핑프 +씨뱅가리 +쉬발 +또라이 +좃만한쉐이 +쎅스 +ㅅㅋ +쳐마 +미친넘 +잠지 +새끼야 +똥구뇽 +ㅂㅁㄱ +쉬이바 +니애미 +후1려 +아닥 +시키가 +유깝 +에에비 +대음순 +찌질한 +븅쉰 +같은새끼 +사까아시이 +보짓물마셔 +김여사 +조또 +항문수셔 +젓나 +시친발미 +씨발롬 +노무 +ㅎㅌㅊ +씌뎅 +씹뻐럴 +쒸펄 +정액핧아 +앰 +슈레기 +자지빨아줘 +딸달이 +sex +시팍 +버지구멍 +fuckyou +ㅇㅒ쁜 +이년 +빠아가 +먹금 +씹부랄 +존1나 +미친 새 +유우방 +화냥년 +걸래년 +빡새끼 +아오 시바 +sex하자 +쉬방새 +씨빡 +쪽발 +딸딸이 +에비 +미친 +떠어라이 +성교해 +저년 +개지랄넘 +죠온나 +여자ㄸㅏ먹기 +호좁 +씹빵구 +방점뱅 +존맛 +처먹을 +시발놈 +빙띤 +자지쑤셔 +지랼 +유방빨아 +좁빠라라 +왕잠지 +섹스 +씨파넘 +띠발 +씨볼탱 +짱꼴라 +자지박어 +창녀 +니아범? 
+보지빨아 +싸가지 +주길년 +유발조물락 +tlqkf +젓가튼쉐이 +창년벼지 +미틴것 +시팍새끼 +시바라지 +ㅈㄹ +버지핧아 +미췬 +짱골라 +미친년 +애자 +후장 +존웃 +뷰웅시인 +저엊 +쉬벌 +개저씨 +달딸이 +샊기 +쫀귀 +젓같은 +쫀맛 +ㅆㅣ +성교하자 +골1빈 +벵신 +씹팔 +빠가야로 +글러 먹 +십창 +씨이불 +눈깔 파 +니기미 +뽕알 +후.려 +시빨 +ㅆㅣ팍넘 +십버지 +창년 +오르가즘 +붜지 +빠아아라 +쉬이이이이 +애미씨뱅 +미친새 +저엇 +ㄱㅐㅈㅏ +미틴 +씨방세 +엑윽 +썅년 +개련 +짱께 +색스 +육갑 +걸레같은년 +떠어라아이 +여자따먹기 +후우장 +창남 +시댕이 +엄창 +18ㅅㅐ끼 +미친새끼 +정신나갔 +씨부랄 +샤발 +죽여버리고 +씨벌탱 +쉬이펄 +시뷰렬 +좇같 +시볼탱 +은새끼 +쉬이불 +나쁜 새끼 +쉽쌔 +개새 +닌기미 +씨입세 +미친쇠리 +돌았나 +런년 +즤랄 +아가리 +내미랄 +빠아구리 +씨가랭넘 +도랐 +씨가랭년 +자위 +입털 +쫓같 +멜리스 +존멋 +보지박어 +좃부랄 +ㅅㅌㅊ +쌔엑스 +시바류 +허벌 +쉬이방 +썅뇬 +작은보지 +터래기터래기 +뒤이치기 +자지뜯어 +뒤져야지 +애에무 +왕털자지 +쒸팔 +디질 +조올라 +정자먹어 +섹히 +보지물마셔 +버지빨아 +시뷰럴 +느그 +시부울 +쓰뎅 +me틴 +개불랄 +뇬 +개거얼래 +죤나 +풀발 +씨가랭놈 +쉬풀 +씨붕 +zazi +씹치 +마스터 +좃또 +에라이 퉷 +난자핧아 +ㅅ1ㅂ +호졉 +빠가십새 +따먹기 +니미랄 +뷩딱 +미친눔 +쉬이빨 +퍄퍄 +꽃휴 +쳐먹고 +뒤지고싶 +걸레핀년 +또오라이 +쫀1 +쑤셔 +씌8 +지 랄 +개씁블 +씨박쉑히 +좃 +ㅆㅣ뎅 +뷰우웅신 +아오시바 +개세 +정병 +씨브럴 +웅엥 +개섹 +보지에자지껴 +자지짤라 +캐럿닷컴 +골 빈 +디-질 +더러운년 +꼬라지 +더엉신 +띠풀 +병1크 +mi틴 +씨퐁보지 +씹귀 +둄마 +뇨온 +버지구녕 +좆만한년 +시방새 +씨퐁넘 +호로새끼 +유두 +조오오조 +세꺄 +깨쌔끼 +씹뽀지 +백보지 +허벌레 +호루자슥 +공알 +씨뷰럴 +새퀴 +보지벌료 +아오ㅅㅂ +내씨발 +극1혐 +애비 +씹자석 +시부렬 +시녀 +유우깝 +막대핧아줘 +꼴뵈기 +쓰렉 +개색휘 +후빨 +크리토리스 +이프리트 +십때끼 +좆털 +내잠지 +레기다 +개쓰래기 +게가튼 +시붕 +ㅅㅂ +야동 +씨방새 +뒤져버 +에라이퉤 +졸라 +주둥아리 +미띤 \ No newline at end of file diff --git a/API-SERVICE/ELKSearch/conf/config.ini b/API-SERVICE/ELKSearch/conf/config.ini new file mode 100644 index 00000000..0b592cc7 --- /dev/null +++ b/API-SERVICE/ELKSearch/conf/config.ini @@ -0,0 +1,23 @@ +# search API config +[commercial] +host = 10.10.10.62 +port = 39200 + +[local] +host = localhost +port = 9200 + +[test] +host = 10.10.10.62 +port = 39200 + +# els_update config +[meta] +host = 10.10.10.62 +port = 39200 +index = biz_meta + +[ckan] +host = 10.10.10.62 +port = 39200 +index = ckan_data \ No newline at end of file diff --git a/API-SERVICE/ELKSearch/conf/db_config.ini b/API-SERVICE/ELKSearch/conf/db_config.ini new file mode 100644 index 00000000..c6beb751 --- /dev/null +++ b/API-SERVICE/ELKSearch/conf/db_config.ini @@ -0,0 +1,39 @@ +[local_db] +host = localhost +port = 5432 
+user = postgres +password = 0312 +database = dataportal +schema = meta + +[remote_db] +host = 10.10.20.60 +port = 5432 +user = dpmanager +password = hello.dp12#$ +database = dataportal +schema = meta + +[commercial_db] +host = 10.10.10.34 +port = 5432 +user = dpmanager +password = hello.dp12#$ +database = dataportal +schema = meta + +[test_db] +host = 192.168.100.126 +port = 25432 +user = dpme +password = hello.meta12#$ +database = dataportal +schema = meta + +[email_db] +host = 192.168.100.126 +port = 25432 +user = dpsi +password = hello.sitemng12#$ +database = dataportal +schema = sitemng \ No newline at end of file diff --git a/API-SERVICE/ELKSearch/conf/mapping.json b/API-SERVICE/ELKSearch/conf/mapping.json new file mode 100644 index 00000000..ff2c2117 --- /dev/null +++ b/API-SERVICE/ELKSearch/conf/mapping.json @@ -0,0 +1,205 @@ +{ + "settings": { + "queries.cache.enabled": "true", + "refresh_interval":"10s", + "max_shingle_diff": 10, + "analysis": { + "tokenizer": { + "nori_user_dic": { + "type": "nori_tokenizer", + "decompound_mode": "discard", + "user_dictionary": "user_dic.txt" + } + }, + "filter": { + "nori_pos": { + "type": "nori_part_of_speech", + "stoptags": [ + "E", "J", "SC", "SE", "SF", "SP", "SSC", "SSO", "SY", "VCN", "VCP", "VSV", "VX", "XPN", "XSA", "XSN", "XSV" + ] + }, + "synonym": { + "type": "synonym_graph", + "synonyms_path": "synonyms.txt" + }, + "stopwords": { + "type": "stop", + "stopwords_path": "stopwords.txt" + }, + "shingle_ten": { + "type": "shingle", + "token_separator": "", + "max_shingle_size": 10 + } + }, + "analyzer": { + "korean_analyzer": { + "tokenizer": "nori_user_dic", + "filter": [ + "nori_pos", "nori_readingform", "lowercase", "synonym", "stopwords", "remove_duplicates", "shingle_ten" + ] + } + } + } + }, + "mappings": { + "properties": { + "biz_dataset_id": { + "type": "keyword" + }, + "data_nm": { + "type": "text", + "fields": { + "korean_analyzer": { + "type": "text", + "analyzer": "korean_analyzer", + 
"search_analyzer": "standard" + } + }, + "fielddata": true + }, + "data_desc": { + "type": "text", + "fields": { + "korean_analyzer": { + "type": "text", + "analyzer": "korean_analyzer", + "search_analyzer": "standard" + } + } + }, + "ctgry": { + "type": "text", + "fields": { + "korean_analyzer": { + "type": "text", + "analyzer": "korean_analyzer", + "search_analyzer": "standard" + } + } + }, + "ctgry_id": { + "type": "keyword" + }, + "src_url": { + "type": "text", + "fields": { + "korean_analyzer": { + "type": "text", + "analyzer": "korean_analyzer", + "search_analyzer": "standard" + } + } + }, + "kywrd": { + "type": "text", + "fields": { + "korean_analyzer": { + "type": "text", + "analyzer": "korean_analyzer", + "search_analyzer": "standard" + } + } + }, + "reg_date": { + "type": "date" + }, + "recnt_amd_date": { + "type": "date" + }, + "lnk_date": { + "type": "date" + }, + "prv_forml": { + "type": "text", + "fields": { + "korean_analyzer": { + "type": "text", + "analyzer": "korean_analyzer", + "search_analyzer": "standard" + } + } + }, + "data_eng_nm": { + "type": "text", + "fields": { + "korean_analyzer": { + "type": "text", + "analyzer": "korean_analyzer", + "search_analyzer": "standard" + } + } + }, + "data_type": { + "type": "text", + "fields": { + "korean_analyzer": { + "type": "text", + "analyzer": "korean_analyzer", + "search_analyzer": "standard" + } + } + }, + "data_clas": { + "type": "text", + "fields": { + "korean_analyzer": { + "type": "text", + "analyzer": "korean_analyzer", + "search_analyzer": "standard" + } + } + }, + "law_review_ncst_yn": { + "type": "keyword" + }, + "secur_review_ncst_yn": { + "type": "keyword" + }, + "data_upd_cycl": { + "type": "keyword" + }, + "tkcgr": { + "type": "keyword" + }, + "tkcg_dept": { + "type": "keyword" + }, + "rqtr": { + "type": "keyword" + }, + "rqt_dept": { + "type": "keyword" + }, + "retv_num": { + "type": "integer" + }, + "intrst_data_num": { + "type": "integer" + }, + "downl_num": { + "type": "integer" + }, 
+ "src_sys": { + "type": "text", + "fields": { + "korean_analyzer": { + "type": "text", + "analyzer": "korean_analyzer", + "search_analyzer": "standard" + } + } + }, + "file_size": { + "type": "text", + "fields": { + "korean_analyzer": { + "type": "text", + "analyzer": "korean_analyzer", + "search_analyzer": "standard" + } + } + } + } + } +} \ No newline at end of file diff --git a/API-SERVICE/Utils/CommonUtil.py b/API-SERVICE/Utils/CommonUtil.py new file mode 100644 index 00000000..dc1825b0 --- /dev/null +++ b/API-SERVICE/Utils/CommonUtil.py @@ -0,0 +1,293 @@ +import argparse +import configparser +import os +import smtplib +import sys +import traceback +import uuid +from datetime import datetime, timedelta +from email.mime.multipart import MIMEMultipart +from email.mime.text import MIMEText +from pathlib import Path +from typing import Any, Optional, Dict +from Utils.keycloak import KeycloakManager + +import jwt +from fastapi.logger import logger +from passlib.context import CryptContext +from psycopg2 import pool +from pytz import timezone + +from ApiService.ApiServiceConfig import config +from ConnectManager import PostgresManager + + +def insert_mail_history(rcv_adr: str, title: str, contents: str, tmplt_cd: str): + db = connect_db() + sql = f""" + INSERT INTO + sitemng.tb_email_send_info (email_id, rcv_adr, title, contents, tmplt_cd, sttus, reg_date) + VALUES + ('{uuid.uuid4()}', '{rcv_adr}', '{title}', '{contents}', '{tmplt_cd}', 'SEND', '{datetime.now()}');""" + db.execute(sql) + + +def send_template_mail(replace_text, receiver_addr, msg_type): + html_part = template_html(msg_type, replace_text) + send_mail( + html_part, + subject=config.email_auth[f"subject_{msg_type}"], + from_=config.email_auth["login_user"], + to_=receiver_addr, + ) + + +def send_mail(msg, **kwargs): + try: + host = kwargs.pop("email_server_host", config.email_auth.get("server_addr")) + port = kwargs.pop("email_server_port", config.email_auth.get("port")) + from_ = kwargs.pop("from_", 
config.email_auth.get("login_user")) + password = kwargs.pop("password", config.email_auth.get("login_pass")) + + message = MIMEMultipart("alternative") + message["Subject"] = kwargs.pop("subject", "") + message["From"] = from_ + message["To"] = kwargs.pop("to_", "") + message.attach(msg) + + stmp = smtplib.SMTP(host=host, port=port) + stmp.ehlo() + stmp.starttls() + stmp.login(from_, password) + stmp.send_message(message) + stmp.quit() + except Exception as e: + raise e + + +def template_html(msg_type, msg): + template = { + "register": (f"{config.root_path}/conf/common/template/emailAthnSend.html", "AUTH_NO"), + "password": (f"{config.root_path}/conf/common/template/pwdEmailAthn.html", "AUTH_NO"), + "share": (f"{config.root_path}/conf/common/template/shareEmail.html", "URL"), + } + + with open(template[msg_type][0], "r") as fd: + html = "\n".join(fd.readlines()) + html = html.replace(template[msg_type][1], msg) + + return MIMEText(html, "html") + + +def convert_data(data) -> str: + data = str(data) + if data: + if data == "now()" or data == "NOW()": + return data + if data[0] == "`": + return data[1:] + return f"'{data.strip()}'" + + +def set_log_path(): + parser = configparser.ConfigParser() + parser.read(f"{config.root_path}/conf/{config.category}/logging.conf", encoding="utf-8") + + parser.set( + "handler_rotatingFileHandler", + "args", + f"('{config.root_path}/log/{config.category}/{config.category}.log', 'a', 20000000, 10)", + ) + + with open(f"{config.root_path}/conf/{config.category}/logging.conf", "w") as f: + parser.write(f) + + +def get_config(config_name: str): + ano_cfg = {} + + conf = configparser.ConfigParser() + config_path = config.root_path + f"/conf/{config.category}/{config_name}" + conf.read(config_path, encoding="utf-8") + for section in conf.sections(): + ano_cfg[section] = {} + for option in conf.options(section): + ano_cfg[section][option] = conf.get(section, option) + + return ano_cfg + + +def parser_params() -> Any: + parser = 
argparse.ArgumentParser() + parser.add_argument("--host", type=str, default="127.0.0.1") + parser.add_argument("--port", type=int, default=19000) + parser.add_argument("--category", default="meta") + parser.add_argument("--db_type", default="test") + + return parser.parse_args() + + +def prepare_config() -> None: + args = parser_params() + config.root_path = str(Path(os.path.dirname(os.path.abspath(__file__))).parent) + config.category = args.category + api_router_cfg = get_config("config.ini") + config.api_config = get_config("api_config.ini") + config.server_host = args.host + config.server_port = args.port + config.db_type = f"{args.db_type}_db" + config.db_info = api_router_cfg[config.db_type] + config.conn_pool = make_connection_pool(config.db_info) + if config.category == "common": + config.secret_info = api_router_cfg["secret_info"] + config.user_info = api_router_cfg["user_info"] + config.pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto") + config.email_auth = api_router_cfg["email_auth"] + config.keycloak_info = api_router_cfg["keycloak_info"] + if config.category == "meta": + config.email_auth = api_router_cfg["email_auth"] + + +def get_keycloak_manager(): + return KeycloakManager(config.keycloak_info["keycloak_url"]) + + +async def get_admin_token(): + res = await get_keycloak_manager().generate_admin_token( + username=config.keycloak_info["admin_username"], + password=config.keycloak_info["admin_password"], + grant_type="password", + ) + + return res.get("data").get("access_token") + + +def make_connection_pool(db_info): + conn_pool = pool.SimpleConnectionPool( + 1, + 20, + user=db_info["user"], + password=db_info["password"], + host=db_info["host"], + port=db_info["port"], + database=db_info["database"], + options=f'-c search_path={db_info["schema"]}', + connect_timeout=10, + ) + return conn_pool + + +def connect_db(): + db = PostgresManager() + return db + + +def save_file_for_reload(): + with open(__file__, "a") as fd: + fd.write(" ") 
+ + +def make_res_msg(result, err_msg, data=None, column_names=None, kor_column_names=None): + header_list = [] + for index, column_name in enumerate(column_names): + if kor_column_names: + header = { + "column_name": column_name, + "kor_column_name": kor_column_names[index], + } + else: + header = {"column_name": column_name} + header_list.append(header) + + if data is None or column_names is None: + res_msg = {"result": result, "errorMessage": err_msg} + else: + res_msg = { + "result": result, + "errorMessage": err_msg, + "data": {"body": data, "header": header_list}, + } + return res_msg + + +def get_exception_info(): + ex_type, ex_value, ex_traceback = sys.exc_info() + trace_back = traceback.extract_tb(ex_traceback) + trace_log = "\n".join([str(trace) for trace in trace_back]) + logger.error( + f"\n- Exception Type : {ex_type}\n- Exception Message : {str(ex_value).strip()}\n- Exception Log : \n{trace_log}" + ) + return ex_type.__name__ + + +def convert_error_message(exception_name: str): + error_message = None + if exception_name == "UniqueViolation": + error_message = "UNIQUE_VIOLATION" + else: + error_message = exception_name + + return error_message + + +##### for user info ##### +class IncorrectUserName(Exception): + pass + + +class IncorrectPassword(Exception): + pass + + +class LeavedUser(Exception): + pass + + +def get_user(user_name: str): + db = connect_db() + user = db.select( + f'SELECT * FROM {config.user_info["table"]} WHERE {config.user_info["id_column"]} = {convert_data(user_name)}' + ) + return user + + +def create_token(data: dict, expires_delta: Optional[timedelta] = None): + to_encode = data.copy() + if expires_delta: + expire = datetime.now(timezone("Asia/Seoul")) + expires_delta + else: + expire = datetime.now(timezone("Asia/Seoul")) + timedelta(minutes=15) + + logger.info(f"commonToken Expire : {expire}") + to_encode.update({"exp": expire}) + + encoded_jwt = jwt.encode( + to_encode, + config.secret_info["secret_key"], + 
algorithm=config.secret_info["algorithm"], + ) + return encoded_jwt + + +def make_token_data(user: Dict) -> Dict: + token_data_column = config.secret_info["token_data_column"].split(",") + token_data = {column: user[column] for column in token_data_column} + return token_data + + +def verify_password(plain_password, hashed_password): + return config.pwd_context.verify(plain_password, hashed_password) + + +def authenticate_user(username: str, password: str): + user = get_user(username) + if not user[0]: + raise IncorrectUserName + + user = user[0][0] + if user["user_sttus"] == "SCSN": + raise LeavedUser("user_sttus :: SCSN}") + + if not verify_password(password, user[config.user_info["password_column"]]): + raise IncorrectPassword + return user diff --git a/API-SERVICE/Utils/SearchUtil.py b/API-SERVICE/Utils/SearchUtil.py new file mode 100644 index 00000000..bb48103e --- /dev/null +++ b/API-SERVICE/Utils/SearchUtil.py @@ -0,0 +1,63 @@ +from ELKSearch.Utils.elasticsearch_utils import make_query + + +def search_count(es, item_dict, query_dict): + data_dict = dict() + data_srttn = { + # search_keyword: (result_key, result_data) + "보유데이터": "hasCount", + "연동데이터": "innerCount", + "외부데이터": "externalCount", + "전체": "totalCount", + "해외데이터": "overseaCount" + } + + # ############ data_srttn ############ + i = None + for j, item in enumerate(item_dict["filter"]): + if "data_srttn" in item["match"].keys(): + i = j + break + else: + i = None + + for ko_nm, eng_nm in data_srttn.items(): + if ko_nm == "해외데이터": + index = "ckan_data" + item_dict["filter"] = [] + i = None + else: + index = "biz_meta" + if i is None: + cnt_query = make_query( + "match", "data_srttn", {"operator": "OR", "query": ko_nm} + ) + item_dict["filter"].append(cnt_query) + i = -1 + else: + item_dict["filter"][i]["match"]["data_srttn"]["query"] = ko_nm + + if ko_nm == "전체": + del item_dict["filter"][i] + + query_dict.update(item_dict) + cnt_query = make_query("query", "bool", query_dict) + cnt = 
es.conn.count(index=index, body=cnt_query)["count"] + data_dict[eng_nm] = cnt + + return data_dict + + +def ckan_query(search_option) -> dict: + search_format = "(*{0}*)" + query_dict = [] + + for query in search_option: + keywords = [search_format.format(word) for keyword in query.keywords for word in keyword.split(" ")] + if len(keywords) > 1: + keywords = f" {query.operator.upper()} ".join(keywords) + else: + keywords = keywords[0] + query_dict.append({"query_string": {"query": keywords,"fields": query.field}}) + + return {"must": query_dict} diff --git a/API-SERVICE/Utils/__init__.py b/API-SERVICE/Utils/__init__.py new file mode 100644 index 00000000..df673ad5 --- /dev/null +++ b/API-SERVICE/Utils/__init__.py @@ -0,0 +1,2 @@ +from .CommonUtil import * +from .SearchUtil import * \ No newline at end of file diff --git a/API-SERVICE/Utils/batch_email.py b/API-SERVICE/Utils/batch_email.py new file mode 100644 index 00000000..bb47f170 --- /dev/null +++ b/API-SERVICE/Utils/batch_email.py @@ -0,0 +1,46 @@ +from email.mime.text import MIMEText + +from ApiService.ApiServiceConfig import config +from Utils.CommonUtil import connect_db +from Utils import send_mail + + +def get_recv_list(): + # batch 1분에 한번씩 email을 전송하고 status를 req에서 send로 변경한다 + query = "SELECT * FROM tb_email_send_info WHERE sttus = 'REQ'" + db = connect_db() + send_list, _ = db.select(query) + return send_list + + +def email_handler(): + send_list = get_recv_list() + print(send_list) + for email_info in send_list: + try: + with open(f'{config.root_path}/conf/sitemng/template/{email_info["tmplt_cd"]}Email.html', "r") as fd: + html = "\n".join(fd.readlines()) + + if email_info["tmplt_cd"] == "share": + subject = "[자동차데이터포털] 자동차데이터포털에서 공유한 데이터입니다." + html = html.replace("URL", email_info['contents']) + else: + if email_info["tmplt_cd"] == "noty": + subject = "[자동차데이터포털] 자동차데이터포털에서 보내는 알림 메일입니다." + else: + subject = f"[자동차데이터포털] {email_info['title']} 신청 메일입니다." 
+ content = email_info["contents"].split("|") + html = html.replace("TITLE", email_info['title']) + html = html.replace("CONTENTS1", content[0]) + html = html.replace("CONTENTS2", content[1]) + + html_part = MIMEText(html, "html") + send_mail(html_part, subject=subject, to_=email_info['rcv_adr']) + except Exception as e: + print(e) + else: + # update status + query = f"UPDATE tb_email_send_info SET sttus = 'SEND'" \ + f"WHERE email_id = '{email_info['email_id']}'" + db = connect_db() + db.execute(query) diff --git a/API-SERVICE/Utils/exceptions.py b/API-SERVICE/Utils/exceptions.py new file mode 100644 index 00000000..98fb70e5 --- /dev/null +++ b/API-SERVICE/Utils/exceptions.py @@ -0,0 +1,6 @@ +class InvalidUserInfo(Exception): + pass + + +class TokenDoesNotExist(Exception): + pass diff --git a/API-SERVICE/Utils/keycloak.py b/API-SERVICE/Utils/keycloak.py new file mode 100644 index 00000000..016a58bd --- /dev/null +++ b/API-SERVICE/Utils/keycloak.py @@ -0,0 +1,245 @@ +from typing import Any, Dict, List +import logging +import aiohttp +import urllib.parse + +logger = logging.getLogger() + + +class KeycloakManager: + _instance = None + + def __new__(cls, *args, **kwargs): + if not cls._instance: + cls._instance = super().__new__(cls) + return cls._instance + + def __init__(self, base_url: str) -> None: + self.base_url = base_url + + async def _request_to_keycloak(self, api_url, method, headers, **kwargs): + """_summary_ + + Args: + api_url (_type_): _description_ + method (_type_): _description_ + headers (_type_): _description_ + + Returns: + _type_: _description_ + """ + data = urllib.parse.urlencode(kwargs) + print(data) + async with aiohttp.ClientSession() as session: + async with session.request(url=api_url, method=method, headers=headers, data=data) as response: + try: + ret = await response.json() + except Exception: + ret = await response.read() + return {"status_code": response.status, "data": ret} + + async def generate_admin_token(self, **kwargs) -> Dict: + 
""" + 관리자계정에 대한 토큰 발급 + + Args: + username (str): + password (str): + grant_type (str): refresh_token or password + + Returns: + Dict: _description_ + """ + headers = {"Content-Type": "application/x-www-form-urlencoded"} + return await self._request_to_keycloak( + api_url=f"{self.base_url}/realms/master/protocol/openid-connect/token", + client_id="admin-cli", + method="POST", + headers=headers, + **kwargs, + ) + + async def generate_normal_token(self, realm, **kwargs) -> Dict: + """ + 일반회원의 토큰 발급 + + Args: + realm (_type_): keycloak 인증 그룹 + grant_type (str): 인증방법('password', 'refresh_token') + username (str): 계정명 + password (str): 패스워드 + refresh_token (str): 리프레시 토큰 + client_id (str): keycloak client_id + client_secret (str): keycloak_client_id에 대응하는 secret key + + Returns: + Dict: _description_ + """ + + headers = {"Content-Type": "application/x-www-form-urlencoded"} + return await self._request_to_keycloak( + api_url=f"{self.base_url}/realms/{realm}/protocol/openid-connect/token", + method="POST", + headers=headers, + **kwargs, + ) + + async def token_info(self, realm, **kwargs) -> Dict: + """_summary_ + + Args: + realm (_type_): _description_ + + Returns: + Dict: _description_ + """ + + headers = {"Content-Type": "application/x-www-form-urlencoded"} + return await self._request_to_keycloak( + api_url=f"{self.base_url}/realms/{realm}/protocol/openid-connect/token/introspect", + method="POST", + headers=headers, + **kwargs, + ) + + async def create_user(self, token, realm, **kwargs): + headers = {"Content-Type": "application/json", "Authorization": "bearer " + token} + async with aiohttp.ClientSession() as session: + async with session.request( + url=f"{self.base_url}/admin/realms/{realm}/users", + method="POST", + headers=headers, + json=kwargs, + ) as response: + return {"status_code": response.status, "data": await response.read()} + + async def delete_user(self, token, realm, user_id): + headers = {"Authorization": "bearer " + token} + return await 
self._request_to_keycloak( + api_url=f"{self.base_url}/admin/realms/{realm}/users/{user_id}", method="DELETE", headers=headers + ) + + async def get_user_list(self, token, realm): + headers = {"Authorization": "bearer " + token} + return await self._request_to_keycloak( + api_url=f"{self.base_url}/admin/realms/{realm}/users", method="GET", headers=headers + ) + + async def user_info(self, token, realm): + headers = {"Authorization": "bearer " + token} + return await self._request_to_keycloak( + api_url=f"{self.base_url}/realms/{realm}/protocol/openid-connect/userinfo", method="GET", headers=headers + ) + + async def user_info_detail(self, token, realm, user_id): + headers = {"Authorization": "bearer " + token} + return await self._request_to_keycloak( + api_url=f"{self.base_url}/admin/realms/{realm}/users/{user_id}", method="GET", headers=headers + ) + + async def alter_user(self, token, realm, user_id, **kwargs): + print(f"kwargs :: {kwargs}") + headers = {"Content-Type": "application/json", "Authorization": "bearer " + token} + async with aiohttp.ClientSession() as session: + async with session.request( + url=f"{self.base_url}/admin/realms/{realm}/users/{user_id}", + method="PUT", + headers=headers, + json=kwargs, + ) as response: + return {"status_code": response.status, "data": await response.read()} + + async def check_user_session(self, token, realm, user_id): + headers = {"Authorization": "bearer " + token} + return await self._request_to_keycloak( + api_url=f"{self.base_url}/admin/realms/{realm}/users/{user_id}/sessions", method="GET", headers=headers + ) + + async def logout(self, realm, **kwargs): + headers = {"Content-Type": "application/x-www-form-urlencoded"} + return await self._request_to_keycloak( + api_url=f"{self.base_url}/realms/{realm}/protocol/openid-connect/logout", + method="POST", + headers=headers, + **kwargs, + ) + + +if __name__ == "__main__": + import asyncio + + manager = KeycloakManager("http://192.168.101.44:8080") + d = 
asyncio.run(manager.generate_admin_token(username="admin", password="zxcv1234!", grant_type="password")) + print(f"admin_token :: {d}") + admin_access_token = d.get("data").get("access_token") + admin_refresh_token = d.get("data").get("refresh_token") + data = { + "username": "swyang", + "firstName": "seokwoo", + "lastName": "yang", + "email": "sw@mobigen.com", + "emailVerified": False, + "enabled": True, + "credentials": [{"value": "zxcv1234!"}], + "attributes": {"phoneNumber": "010-1234-5678", "gender": "male"}, + } + r = asyncio.run( + manager.create_user( + realm="kadap", + token=admin_access_token, + **data, + ) + ) + d = asyncio.run( + manager.generate_normal_token( + realm="kadap", + username="swyang", + password="zxcv1234!", + grant_type="password", + client_id="uyuni", + client_secret="04esVekOjeJZKLHBkgsCQxpbwda41aKW", + ) + ) + print(f"normal token :: {d}") + normal_access_token = d.get("data").get("access_token") + normal_refresh_token = d.get("data").get("refresh_token") + r = asyncio.run( + manager.token_info( + realm="kadap", + token=normal_access_token, + client_id="uyuni", + client_secret="04esVekOjeJZKLHBkgsCQxpbwda41aKW", + ) + ) + print(f"token info :: {r}") + r = asyncio.run(manager.user_info(realm="kadap", token=normal_access_token)) + print(f"user info :: {r}") + user_id = r.get("data").get("sub") + r = asyncio.run(manager.user_info_detail(token=admin_access_token, realm="kadap", user_id=user_id)) + print(f"detail :: {r}") + data = { + "firstName": "seokwoo", + "lastName": "yang", + "email": "sw@mobigen.com", + "emailVerified": False, + "credentials": [{"value": "zxcv1234!"}], + "attributes": {"phoneNumber": "010-1111-1234", "gender": "male"}, + } + r = asyncio.run(manager.alter_user(token=admin_access_token, realm="kadap", user_id=user_id, **data)) + print(f"alter {r}") + r = asyncio.run(manager.check_user_session(token=admin_access_token, realm="kadap", user_id=user_id)) + print(f"check :: {r}") + r = asyncio.run( + manager.logout( + 
realm="kadap", + grant_type="password", + refresh_token=normal_refresh_token, + client_id="uyuni", + client_secret="04esVekOjeJZKLHBkgsCQxpbwda41aKW", + ) + ) + print(f"logout :: {r}") + r = asyncio.run(manager.delete_user(token=admin_access_token, realm="kadap", user_id=user_id)) + print(f"delete :: {r}") + r = asyncio.run(manager.get_user_list(token=admin_access_token, realm="kadap")) + print(f"list :: {r}") diff --git a/API-SERVICE/conf/common/api_config.ini b/API-SERVICE/conf/common/api_config.ini new file mode 100644 index 00000000..83993524 --- /dev/null +++ b/API-SERVICE/conf/common/api_config.ini @@ -0,0 +1,59 @@ +[commonSelect] +method = POST +url = /portal/api/common/commonSelect +sub_dir = common + +[commonExecute] +method = POST +url = /portal/api/common/commonExecute +sub_dir = common + +[commonRegister] +method = POST +url = /portal/api/common/user/commonRegister +sub_dir = common + +[commonLogin] +method = POST +url = /portal/api/common/user/commonLogin +sub_dir = common + +[commonLogout] +method = POST +url = /portal/api/common/user/commonLogout +sub_dir = common + +[commonToken] +method = GET +url = /portal/api/common/user/commonToken +sub_dir = common + +[commonUserInfo] +method = GET +url = /portal/api/common/user/commonUserInfo +sub_dir = common + +[commonPassword] +method = POST +url = /portal/api/common/user/commonPassword +sub_dir = common + +[emailAthnSend] +method = POST +url = /portal/api/common/user/emailAthnSend +sub_dir = common + +[emailAthnCnfm] +method = POST +url = /portal/api/common/user/emailAthnCnfm +sub_dir = common + +[emailAthnPass] +method = POST +url = /portal/api/common/user/emailAthnPass +sub_dir = common + +[emailDataShare] +method = POST +url = /portal/api/common/user/emailDataShare +sub_dir = common \ No newline at end of file diff --git a/API-SERVICE/conf/common/config.ini b/API-SERVICE/conf/common/config.ini new file mode 100644 index 00000000..7327336f --- /dev/null +++ b/API-SERVICE/conf/common/config.ini @@ -0,0 
+1,49 @@ +[test_db] +host = 192.168.100.126 +port = 25432 +user = dpmanager +password = hello.dp12#$ +database = dataportal +schema = users,meta,sitemng,board,analysis,sysconfig,ckan + +[commercial_db] +host = 192.168.54.60 +port = 5432 +user = dpmanager +password = hello.dp12#$ +database = dataportal +schema = users,meta,sitemng,board,analysis,sysconfig,ckan + +# commonLogin, commonLogout, commonRegister, commonToken, commonUserInfo +[secret_info] +secret_key = 09d25e094faa6ca2556c818166b7a9563b93f7099f6f0f4caa6cf63b88e8d3e7 +algorithm = HS256 +expire_min = 30 +cookie_name = user-katech-access-token +token_data_column = user_id,email,login_type,moblphon,user_nm,user_type,user_role,user_uuid,blng_org_cd,blng_org_nm,blng_org_desc + +[user_info] +table = tb_user_info +id_column = user_id +password_column = user_password +normal_password = user_normal +user_role = ROLE_ADMIN,ROLE_OPER + +# emailAthnSend, emailAthnCnfm +[email_auth] +auth_no_len = 10 +subject_register = [자동차데이터포털]회원가입을 위한 인증 메일입니다. +subject_password = [자동차데이터포털]비밀번호 변경을 위한 인증 메일입니다. +subject_share = [자동차데이터포털] 자동차데이터포털에서 공유한 데이터입니다. +server_addr = smtp.office365.com +port = 587 +login_user = admin@bigdata-car.kr +login_pass = Pas07054354@katech! + +[keycloak_info] +keycloak_url = http://192.168.101.44:8080 +admin_username = admin +admin_password = zxcv1234! 
+realm = kadap +client_id = uyuni +client_secret = 04esVekOjeJZKLHBkgsCQxpbwda41aKW \ No newline at end of file diff --git a/API-SERVICE/conf/common/logging.conf b/API-SERVICE/conf/common/logging.conf new file mode 100644 index 00000000..395206e4 --- /dev/null +++ b/API-SERVICE/conf/common/logging.conf @@ -0,0 +1,28 @@ +[loggers] +keys = root + +[logger_root] +level = INFO +handlers = console,rotatingFileHandler + +[formatters] +keys = default + +[formatter_default] +format = %(asctime)s %(levelname)s [%(filename)s:%(lineno)d] - %(message)s + +[handlers] +keys = console,rotatingFileHandler + +[handler_console] +class = StreamHandler +args = (sys.stdout,) +formatter = default +level = INFO + +[handler_rotatingFileHandler] +class = handlers.RotatingFileHandler +formatter = default +args = ('/Users/cbc/DEV/Mobigen/API_DataPortal/Katech/AP_API_Router/API-SERVICE/log/common/common.log', 'a', 20000000, 10) +level = INFO + diff --git a/API-SERVICE/conf/common/template/emailAthnSend.html b/API-SERVICE/conf/common/template/emailAthnSend.html new file mode 100644 index 00000000..1968d00c --- /dev/null +++ b/API-SERVICE/conf/common/template/emailAthnSend.html @@ -0,0 +1,40 @@ + + + + + + + + +
+
+

+ +

+
+ + + + + + + + + +
+ 자동차데이터포털 사용자 인증 알림 +
+
+

자동차데이터포털에서 사용자 인증을 위해 보낸 인증번호입니다.

+

AUTH_NO

+
+
+
+
+
+ + + \ No newline at end of file diff --git a/API-SERVICE/conf/common/template/pwdEmailAthn.html b/API-SERVICE/conf/common/template/pwdEmailAthn.html new file mode 100644 index 00000000..b1629f17 --- /dev/null +++ b/API-SERVICE/conf/common/template/pwdEmailAthn.html @@ -0,0 +1,40 @@ + + + + + + + + +
+
+

+ +

+
+ + + + + + + + + +
+ 자동차데이터포털 비밀번호 찾기 인증 알림 +
+
+

자동차데이터포털에서 비밀번호 찾기를 위해 보낸 인증번호입니다.

+

AUTH_NO

+
+
+
+
+
+ + + \ No newline at end of file diff --git a/API-SERVICE/conf/common/template/shareEmail.html b/API-SERVICE/conf/common/template/shareEmail.html new file mode 100644 index 00000000..01807d7e --- /dev/null +++ b/API-SERVICE/conf/common/template/shareEmail.html @@ -0,0 +1,37 @@ + + + + + + +
+
+

+ +

+
+ + + + + + + + + +
+ 자동차데이터포털 데이터 공유 메일 +
+
+

자동차데이터포털에서 공유한 데이터입니다.

+

URL

+
+
+
+
+
+ + + \ No newline at end of file diff --git a/API-SERVICE/conf/meta/api_config.ini b/API-SERVICE/conf/meta/api_config.ini new file mode 100644 index 00000000..ff1c1f4f --- /dev/null +++ b/API-SERVICE/conf/meta/api_config.ini @@ -0,0 +1,85 @@ +[getCategoryList] +method = GET +url = /portal/api/meta/getCategoryList +sub_dir = meta + +[updateCategory] +method = POST +url = /portal/api/meta/updateCategory +sub_dir = meta + +[addChildCategory] +method = POST +url = /portal/api/meta/addChildCategory +sub_dir = meta + +[getCategoryTree] +method = GET +url = /portal/api/meta/getCategoryTree +sub_dir = meta + +[insertElsBizMeta] +method = POST +url = /portal/api/meta/insertElsBizMeta +sub_dir = meta + +[getElsBizMetaList] +method = POST +url = /portal/api/meta/getElsBizMetaList +sub_dir = meta + +[deleteElsBizMeta] +method = POST +url = /portal/api/meta/deleteElsBizMeta +sub_dir = meta + +[getCategoryNmCount] +method = GET +url = /portal/api/meta/getCategoryNmCount +sub_dir = meta + +[getPrefixBizMeta] +method = POST +url = /portal/api/meta/getPrefixBizMeta +sub_dir = meta + +[metaInsert] +method = GET +url = /portal/api/sitemng/metaInsert +sub_dir = meta + +[updateElsBizMeta] +method = POST +url = /portal/api/meta/updateElsBizMeta +sub_dir = meta + +[getElsCkanList] +method = POST +url = /portal/api/meta/getElsCkanList +sub_dir = meta + +[updateElsBizMetaBulk] +method = GET +url = /portal/api/meta/updateElsBizMetaBulk +sub_dir = meta + +[ConnectIRIS] +method = GET +url = /portal/api/meta/ConnectIRIS +sub_dir = meta + + +[emailAthnSend] +method = POST +url = /portal/api/meta/emailAthnSend +sub_dir = meta + +[emailAthnCnfm] +method = POST +url = /portal/api/meta/emailAthnCnfm +sub_dir = meta + +[emailAthnPass] +method = POST +url = /portal/api/meta/emailAthnPass +sub_dir = meta \ No newline at end of file diff --git a/API-SERVICE/conf/meta/config.ini b/API-SERVICE/conf/meta/config.ini new file mode 100644 index 00000000..2b7e30f7 --- /dev/null +++ 
b/API-SERVICE/conf/meta/config.ini @@ -0,0 +1,30 @@ +[test_db] +host = 192.168.100.126 +port = 25432 +user = dpme +password = hello.meta12#$ +database = dataportal +schema = meta + +[commercial_db] +host = 10.10.10.34 +port = 5432 +user = dpme +password = hello.meta12#$ +database = dataportal +schema = meta + +[secret_info] +name = user-katech-access-token +secret = jwt-secrect-b-iris + +# emailAthnSend, emailAthnCnfm +[email_auth] +auth_no_len = 10 +subject_register = [자동차데이터포털]회원가입을 위한 인증 메일입니다. +subject_password = [자동차데이터포털]비밀번호 변경을 위한 인증 메일입니다. +subject_share = [자동차데이터포털] 자동차데이터포털에서 공유한 데이터입니다. +server_addr = smtp.office365.com +port = 587 +login_user = admin@bigdata-car.kr +login_pass = Pas07054354@katech! \ No newline at end of file diff --git a/API-SERVICE/conf/meta/logging.conf b/API-SERVICE/conf/meta/logging.conf new file mode 100644 index 00000000..6395bb2f --- /dev/null +++ b/API-SERVICE/conf/meta/logging.conf @@ -0,0 +1,28 @@ +[loggers] +keys = root + +[logger_root] +level = INFO +handlers = console,rotatingFileHandler + +[formatters] +keys = default + +[formatter_default] +format = %(asctime)s %(levelname)s [%(filename)s:%(lineno)d] - %(message)s + +[handlers] +keys = console,rotatingFileHandler + +[handler_console] +class = StreamHandler +args = (sys.stdout,) +formatter = default +level = INFO + +[handler_rotatingFileHandler] +class = handlers.RotatingFileHandler +formatter = default +args = ('/Users/cbc/DEV/Mobigen/API_DataPortal/Katech/AP_API_Router/API-SERVICE/log/meta/meta.log', 'a', 20000000, 10) +level = INFO + diff --git a/API-SERVICE/conf/sitemng/api_config.ini b/API-SERVICE/conf/sitemng/api_config.ini new file mode 100644 index 00000000..69e125b0 --- /dev/null +++ b/API-SERVICE/conf/sitemng/api_config.ini @@ -0,0 +1,9 @@ +[getCodeInfo] +method = GET +url = /portal/api/sitemng/getCodeInfo +sub_dir = sitemng + +[getCodeList] +method = GET +url = /portal/api/sitemng/getCodeList +sub_dir = sitemng diff --git a/API-SERVICE/conf/sitemng/config.ini 
b/API-SERVICE/conf/sitemng/config.ini new file mode 100644 index 00000000..ec342771 --- /dev/null +++ b/API-SERVICE/conf/sitemng/config.ini @@ -0,0 +1,19 @@ +[test_db] +host = 192.168.100.126 +port = 25432 +user = dpsi +password = hello.sitemng12#$ +database = dataportal +schema = sitemng + +[commercial_db] +host = 192.168.54.60 +port = 5432 +user = dpsi +password = hello.sitemng12#$ +database = dataportal +schema = sitemng + +[secret_info] +name = user-katech-access-token +secret = jwt-secrect-b-iris \ No newline at end of file diff --git a/API-SERVICE/conf/sitemng/logging.conf b/API-SERVICE/conf/sitemng/logging.conf new file mode 100644 index 00000000..5c387396 --- /dev/null +++ b/API-SERVICE/conf/sitemng/logging.conf @@ -0,0 +1,28 @@ +[loggers] +keys = root + +[logger_root] +level = INFO +handlers = console,rotatingFileHandler + +[formatters] +keys = default + +[formatter_default] +format = %(asctime)s %(levelname)s [%(filename)s:%(lineno)d] - %(message)s + +[handlers] +keys = console,rotatingFileHandler + +[handler_console] +class = StreamHandler +args = (sys.stdout,) +formatter = default +level = INFO + +[handler_rotatingFileHandler] +class = handlers.RotatingFileHandler +formatter = default +args = ('/Users/cbc/DEV/Mobigen/API_DataPortal/Katech/AP_API_Router/API-SERVICE/log/sitemng/sitemng.log', 'a', 20000000, 10) +level = INFO + diff --git a/API-SERVICE/conf/sitemng/template/analysisRequestEmail.html b/API-SERVICE/conf/sitemng/template/analysisRequestEmail.html new file mode 100644 index 00000000..94d0e8fe --- /dev/null +++ b/API-SERVICE/conf/sitemng/template/analysisRequestEmail.html @@ -0,0 +1,39 @@ + + + + + + +
+
+

+ +

+
+ + + + + + + + + +
+ 자동차데이터포털 알림 메일 +
+
+

CONTENTS1

+
+

[ 요청 데이터 목록 ]

+

CONTENTS2

+

+
+
+
+
+ + + \ No newline at end of file diff --git a/API-SERVICE/conf/sitemng/template/notyEmail.html b/API-SERVICE/conf/sitemng/template/notyEmail.html new file mode 100644 index 00000000..ec42acb7 --- /dev/null +++ b/API-SERVICE/conf/sitemng/template/notyEmail.html @@ -0,0 +1,39 @@ + + + + + + +
+
+

+ +

+
+ + + + + + + + + +
+ 자동차데이터포털 알림 메일 +
+
+

CONTENTS1

+

+ 바로가기 +

+
+
+
+
+
+ + + \ No newline at end of file diff --git a/API-SERVICE/conf/sitemng/template/shareEmail.html b/API-SERVICE/conf/sitemng/template/shareEmail.html new file mode 100644 index 00000000..ceff0156 --- /dev/null +++ b/API-SERVICE/conf/sitemng/template/shareEmail.html @@ -0,0 +1,37 @@ + + + + + + +
+
+

+ +

+
+ + + + + + + + + +
+ 자동차데이터포털 데이터 공유 메일 +
+
+

자동차데이터포털에서 공유한 데이터입니다.

+

URL

+
+
+
+
+
+ + + \ No newline at end of file diff --git a/API-SERVICE/conf/sitemng/template/toolApplyEmail.html b/API-SERVICE/conf/sitemng/template/toolApplyEmail.html new file mode 100644 index 00000000..068bf569 --- /dev/null +++ b/API-SERVICE/conf/sitemng/template/toolApplyEmail.html @@ -0,0 +1,37 @@ + + + + + + +
+
+

+ +

+
+ + + + + + + + + +
+ 자동차데이터포털 TITLE 신청 메일 +
+
+

CONTENTS1

+

CONTENTS2

+
+
+
+
+
+ + + \ No newline at end of file diff --git a/API-SERVICE/els_update.py b/API-SERVICE/els_update.py new file mode 100644 index 00000000..825eaef7 --- /dev/null +++ b/API-SERVICE/els_update.py @@ -0,0 +1,74 @@ +import os +import re +from pathlib import Path +from datetime import datetime +from elasticsearch import helpers +from ELKSearch.Utils.database_utils import prepare_config, connect_db, select, config +from ELKSearch.Utils.elasticsearch_utils import data_process, default_process + +root_path = str(Path(os.path.dirname(os.path.abspath(__file__)))) +prepare_config(root_path) + + +def insert_meta(db, es): + bulk_meta_item = list() + db_query = f"SELECT * FROM v_biz_meta_info WHERE status = 'D'" + if config.check == "True": + today = datetime.today().date() + condition = f"AND (DATE(amd_date) >= DATE('{today}')" \ + f"OR DATE(reg_date) >= DATE('{today}'))" + db_query = db_query + condition + + meta_wrap_list = select(db, db_query)[0] + + try: + for meta_wrap in meta_wrap_list: + els_dict = data_process(meta_wrap) + bulk_meta_item.append(els_dict) + helpers.bulk(es.conn, bulk_meta_item, index=es.index) + except Exception as e: + print(e) + + +def insert_ckan(db, es): + bulk_meta_item = list() + db_query = "SELECT biz_dataset_id, data_nm, data_desc, notes, reg_date, tags, updt_dt" \ + " FROM v_biz_meta_ckan" + + if config.check == "True": + today = datetime.today().date() + condition = f"WHERE (DATE(updt_dt) >= DATE('{today}')" \ + f"OR DATE(reg_date) >= DATE('{today}'))" + db_query = db_query + condition + + ckan_wrap_list = select(db, db_query)[0] + try: + for ckan in ckan_wrap_list: + els_dict = default_process(dict(), ckan) + bulk_meta_item.append(els_dict) + helpers.bulk(es.conn, bulk_meta_item, index=es.index) + except Exception as e: + print(e) + + +def main(): + """ + :param + config dir path: {project_path}/ELKSearch/config + --category=ckan|meta, elasticsearch config + --db_type=test|commercial , database config + --check=True|False, True=today False=All 
+ :return: + """ + prepare_config(root_path) + es = config.es + db = connect_db() + + if config.category == "meta": + insert_meta(db, es) + else: + insert_ckan(db, es) + + +if __name__ == "__main__": + main() diff --git a/API-SERVICE/recommend_word.py b/API-SERVICE/recommend_word.py new file mode 100644 index 00000000..b3c63fea --- /dev/null +++ b/API-SERVICE/recommend_word.py @@ -0,0 +1,80 @@ +import os +import re +import ast +from pathlib import Path +from datetime import datetime +from collections import Counter +from ELKSearch.Utils.database_utils import ( + prepare_config, + connect_db, + select, + config, + execute, +) + +root_path = str(Path(os.path.dirname(os.path.abspath(__file__)))) + + +def main(): + """ + param: + parameter는 els_update.py 에서 공통으로 사용 + - db_type: conf/config.ini or ELKSearch/conf/db_config.ini + - check: type str, False or True, True=누적,False=갱신 + + """ + prepare_config(root_path) + db = connect_db() + + # 검색어 로그 불러오기 + search_file_name = ( + f"{root_path}/log/meta/{datetime.today().date().strftime('%Y%m%d')}_search.log" + ) + with open(search_file_name, "r") as fp: + search_log_file = fp.read().split("\n")[:-1] + + # 필터링할 단어 리스트 불러오기 + fword_file_name = f"{root_path}/ELKSearch/conf/bad_word.txt" + with open(fword_file_name, "r") as fp: + bad_word_list = fp.read().split("\n") + + # 자모만 들어가 있는 오타 제외 + today_search_word = [] + for words in search_log_file: + result = [ + word + for word in ast.literal_eval(words) + if re.search("[ㄱ-ㅎㅏ-ㅣ]", word) is None + ] + today_search_word = today_search_word + result + + # 단어 필터링 + today_search_word = [ + word for word in today_search_word if word not in bad_word_list + ] + today_search_word = Counter(today_search_word) + + # check True : 누적 / False: 갱신 + if config.check == "True": + query = "SELECT * FROM tb_recommend_keyword" + recommend_word = select(db, query)[0] + + for word in recommend_word: + key = word["keyword"] + cnt = word["count"] + if key in today_search_word.keys(): + 
today_search_word[key] = today_search_word[key] + cnt + else: + today_search_word[key] = cnt + + for word, cnt in today_search_word.most_common(10): + query = ( + "INSERT INTO tb_recommend_keyword(keyword,count,use_yn)" + f"VALUES ('{word}',{cnt}, 'N') ON CONFLICT (keyword) DO UPDATE " + f"SET count = {cnt};" + ) + execute(db, db.cursor(), query) + + +if __name__ == "__main__": + main() diff --git a/API-SERVICE/requirements.txt b/API-SERVICE/requirements.txt new file mode 100644 index 00000000..f269d1e6 --- /dev/null +++ b/API-SERVICE/requirements.txt @@ -0,0 +1,48 @@ +aiohttp==3.8.3 +aiosignal==1.2.0 +anyio==3.6.2 +APScheduler==3.9.1.post1 +asgiref==3.5.2 +async-timeout==4.0.2 +asyncssh==2.12.0 +attrs==22.1.0 +bcrypt==4.0.1 +certifi==2022.9.24 +cffi==1.15.1 +charset-normalizer==2.0.12 +click==8.1.3 +cryptography==38.0.1 +decorator==5.1.1 +ecdsa==0.18.0 +elastic-transport==8.4.0 +elasticsearch==8.4.3 +fastapi==0.75.2 +frozenlist==1.3.1 +h11==0.14.0 +idna==3.4 +jose==1.0.0 +multidict==6.0.2 +paramiko==2.10.3 +passlib==1.7.4 +psycopg2==2.8.6 +py==1.11.0 +pyasn1==0.4.8 +pycparser==2.21 +pydantic==1.10.2 +PyJWT==2.6.0 +PyNaCl==1.5.0 +python-jose==3.3.0 +pytz==2022.5 +pytz-deprecation-shim==0.1.0.post0 +requests==2.27.1 +retry==0.9.2 +rsa==4.9 +six==1.16.0 +sniffio==1.3.0 +starlette==0.17.1 +typing_extensions==4.4.0 +tzdata==2022.7 +tzlocal==4.2 +urllib3==1.26.12 +uvicorn==0.16.0 +yarl==1.8.1 diff --git a/API-SERVICE/safe_start.sh b/API-SERVICE/safe_start.sh new file mode 100644 index 00000000..5d2a4c5b --- /dev/null +++ b/API-SERVICE/safe_start.sh @@ -0,0 +1,66 @@ +app_name=API-Service +router_host=$1 +router_port=$2 +category=$3 +router_db=$4 + +input() { + if [[ $router_host == "" ]];then + router_host=192.168.100.126 + fi + if [[ $router_port == "" ]];then + router_port=9014 + fi + if [[ $category == "" ]];then + category=meta + fi + if [[ $router_db == "" ]];then + router_db=test + fi +} + +router_stop() { + app=$( ps -ef | grep python | grep server.py | grep 
${router_host} | grep ${router_port} | awk '{print $2}' ) + if [[ $app != "" ]];then + exit_app="kill -9 ${app}" + echo "Stop Command ( router ) : "${exit_app} + $exit_app + else + echo "Not Found application. ( router )" + fi +} + +uvicorn_stop() { + uvicorn=$( netstat -nlp | grep ${router_host}':'${router_port} | awk '{print $7}' | tr "/" "\n" ) + if [[ $uvicorn != "" ]];then + for i in $uvicorn + do + if [[ ${i} == *python* ]];then + continue + fi + exit_uvicorn="kill -9 ${i}" + echo "Stop Command ( uvicorn ) : "${exit_uvicorn} + $exit_uvicorn + done + else + echo "Not Found application. ( uvicorn )" + fi +} + +router_start() { + source_path="$( cd "$( dirname "$0" )" && pwd -P )" + router_exec="nohup python3 ${source_path}/server.py --host ${router_host} --port ${router_port} --category ${category} --db_type ${router_db} 1> /dev/null 2>&1 &" + echo "Start Command : ${router_exec}" + nohup python3 ${source_path}/server.py --host ${router_host} --port ${router_port} --category ${category} --db_type ${router_db} 1> /dev/null 2>&1 & +} + +echo "########## Safe Start (${app_name}) ##########" +echo "========== STOP ${app_name} ==========" +input + +router_stop +sleep 2 +uvicorn_stop + +echo "========== START ${app_name} ==========" +router_start diff --git a/API-SERVICE/server.py b/API-SERVICE/server.py new file mode 100644 index 00000000..47a5b0dc --- /dev/null +++ b/API-SERVICE/server.py @@ -0,0 +1,42 @@ +import os + +import uvicorn +from apscheduler.schedulers.background import BackgroundScheduler +from fastapi import FastAPI + +from ApiService import ApiService +from ApiService.ApiServiceConfig import config +from Utils.CommonUtil import prepare_config, set_log_path + + +prepare_config() +api_router = ApiService() +app = FastAPI() +app.include_router(api_router.router) + + +@app.on_event("startup") +async def startup(): + if config.category == "common": + from Utils import batch_email + scheduler = BackgroundScheduler() + 
scheduler.add_job(batch_email.email_handler, "interval", seconds=5, id="sender") + scheduler.start() + + +if __name__ == "__main__": + log_dir = f"{config.root_path}/log/{config.category}" + if os.path.isdir(log_dir): + print("Directory Exists") + else: + print(f"Make log dir : {log_dir}") + os.makedirs(log_dir) + + set_log_path() + uvicorn.run( + "server:app", + host=config.server_host, + port=config.server_port, + reload=True, + log_config=f"{config.root_path}/conf/{config.category}/logging.conf", + ) diff --git a/API-SERVICE/start.sh b/API-SERVICE/start.sh new file mode 100644 index 00000000..95f4f281 --- /dev/null +++ b/API-SERVICE/start.sh @@ -0,0 +1,33 @@ +app_name=API-Service +router_host=$1 +router_port=$2 +category=$3 +router_db=$4 + +input() { + if [[ $router_host == "" ]];then + router_host=192.168.100.126 + fi + if [[ $router_port == "" ]];then + router_port=9014 + fi + if [[ $category == "" ]];then + category=meta + fi + if [[ $router_db == "" ]];then + router_db=test + fi +} + +router_start() { + source_path="$( cd "$( dirname "$0" )" && pwd -P )" + router_exec="nohup python3 ${source_path}/server.py --host ${router_host} --port ${router_port} --category ${category} --db_type ${router_db} 1> /dev/null 2>&1 &" + echo "Start Command : ${router_exec}" + nohup python3 ${source_path}/server.py --host ${router_host} --port ${router_port} --category ${category} --db_type ${router_db} 1> /dev/null 2>&1 & +} + +echo "########## Start Application (${app_name}) ##########" +echo "========== START ${app_name} ==========" +input + +router_start diff --git a/API-SERVICE/stop.sh b/API-SERVICE/stop.sh new file mode 100644 index 00000000..e4227863 --- /dev/null +++ b/API-SERVICE/stop.sh @@ -0,0 +1,47 @@ +app_name=API-Service +router_host=$1 +router_port=$2 + +input() { + if [[ $router_host == "" ]];then + router_host=192.168.100.126 + fi + if [[ $router_port == "" ]];then + router_port=9014 + fi +} + +router_stop() { + app=$( ps -ef | grep python | grep server.py | 
grep ${router_host} | grep ${router_port} | awk '{print $2}' ) + if [[ $app != "" ]];then + exit_app="kill -9 ${app}" + echo "Stop Command ( router ) : "${exit_app} + $exit_app + else + echo "Not Found application. ( router )" + fi +} + +uvicorn_stop() { + uvicorn=$( netstat -nlp | grep ${router_host}':'${router_port} | awk '{print $7}' | tr "/" "\n" ) + if [[ $uvicorn != "" ]];then + for i in $uvicorn + do + if [[ ${i} == *python* ]];then + continue + fi + exit_uvicorn="kill -9 ${i}" + echo "Stop Command ( uvicorn ) : "${exit_uvicorn} + $exit_uvicorn + done + else + echo "Not Found application. ( uvicorn )" + fi +} + +echo "########## Stop Application (${app_name}) ##########" +echo "========== STOP ${app_name} ==========" +input +router_stop +sleep 2 +uvicorn_stop diff --git a/common_libs/libs/__init__.py b/common_libs/libs/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/common_libs/libs/database/__init__.py b/common_libs/libs/database/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/common_libs/libs/database/connector.py b/common_libs/libs/database/connector.py new file mode 100644 index 00000000..a231e790 --- /dev/null +++ b/common_libs/libs/database/connector.py @@ -0,0 +1,42 @@ +import abc +from typing import TypeVar, Tuple, List, Dict + +from fastapi import FastAPI + +T = TypeVar("T", bound="Executor") + + +class Connector(metaclass=abc.ABCMeta): + @abc.abstractmethod + def init_app(self, app: FastAPI, **kwargs): + ... + + @abc.abstractmethod + def get_db(self) -> T: + ... + + +class Executor(metaclass=abc.ABCMeta): + @abc.abstractmethod + def query(self, **kwargs) -> T: + ... + + @abc.abstractmethod + def all(self) -> Tuple[List[dict], int]: + ... + + @abc.abstractmethod + def first(self) -> dict: + ... + + @abc.abstractmethod + def execute(self, **kwargs): + ... + + @abc.abstractmethod + def get_column_info(self, table_nm) -> List[Dict[str, str]]: + ... + + @abc.abstractmethod + def close(self): + ... 
diff --git a/common_libs/libs/database/models.py b/common_libs/libs/database/models.py new file mode 100644 index 00000000..76ba8240 --- /dev/null +++ b/common_libs/libs/database/models.py @@ -0,0 +1,159 @@ +from sqlalchemy.orm import Session + +from .orm import db + + +class BaseMixin: + # id = Column(Integer, primary_key=True, index=True) + # created_at = Column(DateTime, nullable=False, default=func.utc_timestamp()) + # updated_at = Column(DateTime, nullable=False, default=func.utc_timestamp(), onupdate=func.utc_timestamp()) + + def __init__(self): + self._q = None + self._session = None + self.served = None + self.id = 0 + + def all_columns(self): + return [c for c in self.__table__.columns if c.primary_key is False and c.name != "created_at"] + + def __hash__(self): + return hash(self.id) + + @classmethod + def create(cls, session: Session, auto_commit=False, **kwargs): + """ + 테이블 데이터 적재 전용 함수 + :param session: + :param auto_commit: 자동 커밋 여부 + :param kwargs: 적재 할 데이터 + :return: + """ + obj = cls() + for col in obj.all_columns(): + col_name = col.name + if col_name in kwargs: + setattr(obj, col_name, kwargs.get(col_name)) + session.add(obj) + session.flush() + if auto_commit: + session.commit() + return obj + + @classmethod + def get(cls, session: Session = None, **kwargs): + """ + Simply get a Row + :param session: + :param kwargs: + :return: + """ + sess = next(db.session()) if not session else session + query = sess.query(cls) + for key, val in kwargs.items(): + col = getattr(cls, key) + query = query.filter(col == val) + + if query.count() > 1: + raise Exception("Only one row is supposed to be returned, but got more than one.") + result = query.first() + if not session: + sess.close() + return result + + @classmethod + def filter(cls, session: Session = None, **kwargs): + """ + Simply get a Row + :param session: + :param kwargs: + :return: + """ + cond = [] + for key, val in kwargs.items(): + key = key.split("__") + if len(key) > 2: + raise Exception("No 2 
more dunders") + col = getattr(cls, key[0]) + if len(key) == 1: + cond.append((col == val)) + elif len(key) == 2 and key[1] == "gt": + cond.append((col > val)) + elif len(key) == 2 and key[1] == "gte": + cond.append((col >= val)) + elif len(key) == 2 and key[1] == "lt": + cond.append((col < val)) + elif len(key) == 2 and key[1] == "lte": + cond.append((col <= val)) + elif len(key) == 2 and key[1] == "in": + cond.append((col.in_(val))) + obj = cls() + if session: + obj._session = session + obj.served = True + else: + obj._session = next(db.session()) + obj.served = False + query = obj._session.query(cls) + query = query.filter(*cond) + obj._q = query + return obj + + @classmethod + def cls_attr(cls, col_name=None): + if col_name: + col = getattr(cls, col_name) + return col + else: + return cls + + def order_by(self, *args: str): + for a in args: + if a.startswith("-"): + col_name = a[1:] + is_asc = False + else: + col_name = a + is_asc = True + col = self.cls_attr(col_name) + self._q = self._q.order_by(col.asc()) if is_asc else self._q.order_by(col.desc()) + return self + + def update(self, auto_commit: bool = False, **kwargs): + qs = self._q.update(kwargs) + get_id = self.id + ret = None + + self._session.flush() + if qs > 0: + ret = self._q.first() + if auto_commit: + self._session.commit() + return ret + + def first(self): + result = self._q.first() + self.close() + return result + + def delete(self, auto_commit: bool = False): + self._q.delete() + if auto_commit: + self._session.commit() + + def all(self): + print(self.served) + result = self._q.all() + self.close() + return result + + def count(self): + result = self._q.count() + self.close() + return result + + def close(self): + if not self.served: + self._session.close() + else: + self._session.flush() diff --git a/common_libs/libs/database/orm.py b/common_libs/libs/database/orm.py new file mode 100644 index 00000000..5e94decc --- /dev/null +++ b/common_libs/libs/database/orm.py @@ -0,0 +1,253 @@ +from 
typing import Dict, List, Union, Tuple + +import sqlalchemy +from fastapi import FastAPI +from sqlalchemy import Column, MetaData, and_, create_engine, not_, or_ +from sqlalchemy.orm import sessionmaker, declarative_base + +from .connector import Connector, Executor + +db = declarative_base() + + +class SQLAlchemyConnector(Connector): + def __init__(self, base=None, app: FastAPI = None, **kwargs): + self._engine = None + self._Base = base + self._session = None + self._session_instance = None + self._metadata = None + self._q = None + self._cnt = 0 + self._column_names = [] + if app is not None: + self.init_app(app=app, **kwargs) + + def init_app(self, app: FastAPI, **kwargs): + database_url = kwargs.get("DB_URL") + pool_recycle = kwargs.get("DB_POOL_RECYCLE", 900) + echo = kwargs.get("DB_ECHO", True) + + self._engine = create_engine( + database_url, + echo=echo, + pool_recycle=pool_recycle, + pool_pre_ping=True, + ) + + self._metadata = MetaData() + for schema in kwargs.get("DB_INFO").get("SCHEMA").split(","): + self._metadata.reflect(bind=self.engine, views=True, schema=schema) + + @app.on_event("startup") + def startup(): + self._engine.connect() + + @app.on_event("shutdown") + def shutdown(): + self._session.close_all() + self._engine.dispose() + + def get_db(self) -> "SQLAlchemyConnector": + if self.session is None: + raise Exception("must be called 'init_db'") + try: + self._session_instance = self._session() + yield self + finally: + self._session_instance.close() + + +class OrmExecutor(Executor): + def __init__(self, engine) -> None: + self.engine = engine + self._session = sessionmaker(autocommit=False, autoflush=False, bind=self._engine) + + def query(self, **kwargs) -> "SQLAlchemyConnector": + base_table = self.get_table(kwargs["table_nm"]) + key = kwargs["key"] + # Join + if join_info := kwargs["join_info"]: + join_table = self.get_table(join_info.table_nm) + query = self._session_instance.query(base_table, join_table).join( + join_table, + 
getattr(base_table.columns, key) == getattr(join_table.columns, join_info.key), + ) + else: + query = self._session_instance.query(base_table) + + # Where + if where_info := kwargs["where_info"]: + filter_val = None + for where_condition in where_info: + filter_condition = self._parse_operand( + getattr(base_table.columns, where_condition.key), + where_condition.value, + where_condition.compare_op, + ) + if sub_conditions := where_condition.sub_conditions: + for sub_condition in sub_conditions: + sub_filter_condition = self._parse_operand( + getattr(base_table.columns, sub_condition.key), + sub_condition.value, + sub_condition.compare_op, + ) + # or_ , | 사용무관 + if sub_condition.op.lower() == "or": + filter_condition = or_(filter_condition, sub_filter_condition) + elif sub_condition.op.lower() == "and": + filter_condition = and_(filter_condition, sub_filter_condition) + + if filter_val is not None: + if where_condition.op.lower() == "or": + filter_val = filter_val | filter_condition + elif where_condition.op.lower() == "and": + filter_val = filter_val & filter_condition + else: + filter_val = filter_condition + query = query.filter(filter_val) + + self._cnt = query.count() + + # Order + if order_info := kwargs["order_info"]: + order_key = getattr(base_table.columns, order_info.key) + query = query.order_by(getattr(sqlalchemy, order_info.order.lower())(order_key)) + + # Paging + if page_info := kwargs["page_info"]: + per_page = page_info.per_page + cur_page = page_info.cur_page + query = query.limit(per_page).offset((cur_page - 1) * per_page) + + self._q = query + self._column_names = [column.name for column in base_table.columns] + return self + + def all(self) -> Tuple[List[dict], int]: + data = [dict(zip(self._column_names, data)) for data in self._q.all()] + + return data, self._cnt + + def first(self): + data = self._q.first() + return data + + def execute(self, **kwargs): + """ + [ + { + "method":"INSERT", + "table_nm":"inqr_bas", + "data":{ + 
"id":"9bb29b2b-159e-4cee-89af-a80cfe6f0651", + "title":"test문의", + "sbst":"문으으으의", + "ctg_id":"INQR001", + "reg_user_nm":"테스터", + "cmpno":"dev-12346578", + "del_yn":"N", + "reg_user":"f142cdc2-207b-4eda-9e7d-2605e4e65571", + "reg_date":"NOW()", + "amd_user":"f142cdc2-207b-4eda-9e7d-2605e4e65571", + "amd_date":"NOW()" + } + } + ] + [ + { + "method":"UPDATE", + "table_nm":"inqr_bas", + "key": ["id"], + "data":{ + "id":"9bb29b2b-159e-4cee-89af-a80cfe6f0651", + "title":"test문의111111", + "sbst":"문으으으의" + } + } + ] + [ + { + "method":"DELETE", + "table_nm":"inqr_bas", + "key": ["id"], + "data":{ + "id":"9bb29b2b-159e-4cee-89af-a80cfe6f0651" + } + } + ] + + {"result":1,"errorMessage":""} + """ + # try: + # session.begin() + + # for row in params: + # method = row.method.lower() + # table = db.get_table(row.table_nm) + # cond = [getattr(table.columns, k) == row.data[k] for k in row.key] if row.key else [] + + # if method == "insert": + # ins = table.insert().values(**row.data) + # session.execute(ins) + # elif method == "update": + # stmt = table.update().where(*cond).values(**row.data) + # session.execute(stmt) + # elif method == "delete": + # stmt = table.delete().where(*cond) + # session.execute(stmt) + # else: + # raise NotImplementedError + + # session.commit() + # except Exception as e: + # session.rollback() + # raise e + + def get_table(self, table_nm): + for nm, t in self._metadata.tables.items(): + if table_nm in nm: + return t + + def _parse_operand(key: Column, value: Union[str, int], compare: str): + compare = compare.lower() + if compare in ["equal", "="]: + return key == value + elif compare in ["not equal", "!="]: + return key != value + elif compare in ["greater than", ">"]: + return key > value + elif compare in ["greater than or equal", ">="]: + return key >= value + elif compare in ["less than", "<"]: + return key < value + elif compare in ["less than or equal", "<="]: + return key <= value + elif compare == "like": + return key.like(value) + elif 
compare == "not like": + return not_(key.like(value)) + elif compare == "in": + return key.in_(value.split(",")) + elif compare == "not in": + return not_(key.in_(value.split(","))) + elif compare == "ilike": + return key.ilike(value) + else: + return + + @property + def session(self): + if self._session_instance: + return self._session_instance + + @property + def engine(self): + return self._engine + + @property + def Base(self): + return self._Base + + def get_column_info(self, table_nm) -> List[Dict[str, str]]: + ... diff --git a/common_libs/libs/database/tibero.py b/common_libs/libs/database/tibero.py new file mode 100644 index 00000000..9f357ef6 --- /dev/null +++ b/common_libs/libs/database/tibero.py @@ -0,0 +1,230 @@ +import logging +from datetime import datetime +from decimal import Decimal +from typing import Dict, List, Tuple + +import pyodbc +from fastapi import FastAPI + +from .connector import Connector, Executor + +logger = logging.getLogger() + + +class QueryExecutor(Executor): + def __init__(self, conn: pyodbc.Connection): + self.conn = conn + self._q = None + self._cntq = None + self.cur = conn.cursor() + + def query(self, **kwargs) -> "QueryExecutor": + """ + SELECT * + FROM ( + SELECT ROWNUM AS rn, subquery.* + FROM ( + SELECT * + FROM ACT_SRVY + JOIN ACT_SRV_FILE_DETLS ON ACT_SRVY.IDX = ACT_SRV_FILE_DETLS.IDX + WHERE ACT_SRVY.OFANSTP = 'CMPLT' + ) AS subquery + ) AS main_query + WHERE main_query.rn > 0; + """ + table_nm = kwargs.get("table_nm") + join_key = kwargs.get("key") + + join_clause = "" + if join_info := kwargs.get("join_info"): + t = join_info["table_nm"] + k = join_info["key"] + join_clause += f"join {t} on {table_nm}.{join_key} = {t}.{k} " + + where_clause = "" + if where_info := kwargs.get("where_info"): + where_clause += "where " + for info in where_info: + clause = self._calc_operand(f"{info['table_nm']}.{info['key']}", info["value"], info["compare_op"]) + if "sub" in info and info["sub"]: + where_clause += f"{info['op']} 
({clause} " + for sub in info["sub"]: + sub_clause = self._calc_operand( + f"{sub['table_nm']}.{sub['key']}", sub["value"], sub["compare_op"] + ) + where_clause += f"{sub['op']} {sub_clause}" + where_clause += ") " + else: + where_clause += f"{info['op']} {clause} " + + order_clause = "" + if order_info := kwargs.get("order_info"): + t = order_info["table_nm"] + k = order_info["key"] + o = order_info["order"] + order_clause += f"order by {t}.{k} {str(o).upper()} " + + query = f"select * from {table_nm} " + join_clause + where_clause + order_clause + self._cntq = f"select count(*) from {table_nm} " + join_clause + where_clause + order_clause + + page_clause = "" + if page_info := kwargs.get("page_info"): + per_page = page_info["per_page"] + offset = (page_info["cur_page"] - 1) * per_page + limit = offset + per_page + page_clause += "select * from (select ROWNUM as SEQ, sq.* " + page_clause += f"from ({query}) as sq) as mq where mq.SEQ > {offset} and mq.SEQ <= {limit}" + query = page_clause + + self._q = query + logger.info(query) + return self + + def all(self) -> Tuple[List[dict], int]: + try: + rows = self.cur.execute(self._q).fetchall() + if rows: + datas = [dict(zip(self._get_headers(self.cur), self._parse_select_data(row))) for row in rows] + return datas, int(self.cur.execute(self._cntq).fetchone()[0]) + except TypeError as te: + logger.warning(te) + return + except Exception as e: + raise e + + def first(self) -> Dict: + try: + row = self.cur.execute(self._q).fetchone() + if row: + return dict(zip(self._get_headers(self.cur), self._parse_select_data(row))) + except TypeError as te: + logger.warning(te) + return + except Exception as e: + raise e + + def execute(self, **kwargs): + def parse_update_data(datas): + ret = [] + for data in datas: + if isinstance(data, str) and data.upper().startswith("NOW"): + ret.append(datetime.now()) + elif isinstance(data, str) and data.lower().startswith("`"): + pass + else: + ret.append(data) + return tuple(ret) + + method = 
str(kwargs.get("method")).lower() + data = kwargs.get("data") + params = parse_update_data(data.values()) + + query = "" + if method == "insert": + query += f"insert into {kwargs.get('table_nm')} " + query += f"({','.join(data.keys())}) " + query += f"values ({','.join(['?']*len(data))})" + elif method == "update": + query += f"update {kwargs.get('table_nm')} " + place_hold = [ + f"{k} = {data[k][1:]}" if isinstance(data[k], str) and data[k].startswith("`") else f"{k} = ?" + for k in data.keys() + ] + query += f"set {','.join(place_hold)} " + + k0, *ks = kwargs.get("key") + query += f"where {k0} = '{data[k0]}' " + if ks: + for k in ks: + query += f"and {k} = '{data[k]}' " + elif method == "delete": + query += f"delete from {kwargs.get('table_nm')} " + query += f"where {' and '.join([f'{k} = ?' for k in kwargs.get('key')])}" + + else: + raise Exception(f"{method} :: Mehtod not allowed") + + logger.info(f"query :: {query}") + try: + self.cur.execute(query, params) + self.conn.commit() + except Exception as e: + self.conn.rollback() + logger.error(f"error at params :: {params}") + raise e + + def _parse_select_data(self, row): + return map(lambda x: int(x) if isinstance(x, Decimal) else x, row) + + def _get_headers(self, cursor) -> list[str]: + return [d[0].lower() for d in cursor.description] + + def _calc_operand(self, k, v, operand) -> str: + if operand in ["Equal", "="]: + return f"{k} = '{v}'" + elif operand in ["Not Equal", "!="]: + return f"{k} != '{v}'" + elif operand in ["Greater Than", ">"]: + return f"{k} > '{v}'" + elif operand in ["Greater Than or Equal", ">="]: + return f"{k} >= '{v}'" + elif operand in ["Less Than", "<"]: + return f"{k} < '{v}'" + elif operand in ["Less Than or Equal", "<="]: + return f"{k} <='{v}'" + elif operand.lower() in ["ilike"]: + return f"upper({k}) like '{v}'" + elif operand.lower() in ["in"]: + v = ",".join([f"'{x}'" for x in v.split(",")]) + return f"{k} in ({v})" + else: + return f"{k} {operand} '{v}'" + + def 
get_column_info(self, table_nm, schema) -> List[Dict[str, str]]: + # OWNER, TABLE_NAME, COLUMN_NAME, COMMENT + query = ( + f"SELECT * FROM ALL_COL_COMMENTS WHERE TABLE_NAME = '{table_nm.upper()}' AND OWNER = '{schema.upper()}';" + ) + logger.info(query) + self.cur.execute(query) + return [{"column_name": str(row[2]).lower(), "kor_column_name": row[3]} for row in self.cur.fetchall()] + + def close(self): + if self.cur: + self.cur.close() + + +class TiberoConnector(Connector): + def __init__(self, app: FastAPI = None, **kwargs): + self.conn = None + self.cur = None + self._q = None + self._cntq = None + if app is not None: + self.init_app(app, kwargs) + + def init_app(self, app: FastAPI, **kwargs): + def __convert_timestamp(value): + value = datetime.strptime(value.decode(), "%Y/%m/%d %H:%M:%S.%f") + return value.strftime("%Y-%m-%d %H:%M:%S.%f")[:-3] + + @app.on_event("startup") + async def startup(): + self.conn = pyodbc.connect(kwargs.get("DB_URL"), autocommit=False) + self.conn.setdecoding(pyodbc.SQL_CHAR, encoding="utf-8") + self.conn.setdecoding(pyodbc.SQL_WCHAR, encoding="utf-32le") + self.conn.setdecoding(pyodbc.SQL_WMETADATA, encoding="utf-32le") + self.conn.add_output_converter(pyodbc.SQL_TYPE_TIMESTAMP, __convert_timestamp) + self.conn.setencoding(encoding="utf-8") + + @app.on_event("shutdown") + async def shutdown(): + if self.conn: + self.conn.close() + + def get_db(self) -> "TiberoConnector": + executor = QueryExecutor(self.conn) + try: + yield executor + finally: + executor.close() diff --git a/common_libs/libs/logging_temp.py b/common_libs/libs/logging_temp.py new file mode 100644 index 00000000..e9f285d0 --- /dev/null +++ b/common_libs/libs/logging_temp.py @@ -0,0 +1,7 @@ +log_config = { + "version": 1, + "disable_existing_loggers": False, + "formatters": {"default": {"format": "%(asctime)s %(levelname)s [%(filename)s:%(lineno)d] - %(message)s"}}, + "handlers": {"console": {"class": "logging.StreamHandler", "level": "DEBUG", "formatter": "default"}}, 
+ "loggers": {"local": {"level": "DEBUG", "handlers": ["console"], "propagate": True}}, +} diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 00000000..e6b7c555 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,44 @@ +aiohttp==3.8.3 +aiosignal==1.2.0 +anyio==3.6.2 +asgiref==3.5.2 +async-timeout==4.0.2 +asyncssh==2.12.0 +attrs==22.1.0 +bcrypt==4.0.1 +certifi==2022.9.24 +cffi==1.15.1 +charset-normalizer==2.0.12 +click==8.1.3 +cryptography==38.0.1 +decorator==5.1.1 +ecdsa==0.18.0 +elastic-transport==8.4.0 +elasticsearch==8.4.3 +fastapi==0.75.2 +frozenlist==1.3.1 +h11==0.14.0 +idna==3.4 +jose==1.0.0 +multidict==6.0.2 +paramiko==2.10.3 +passlib==1.7.4 +psycopg2==2.8.6 +py==1.11.0 +pyasn1==0.4.8 +pycparser==2.21 +pydantic==1.10.2 +PyJWT==2.6.0 +PyNaCl==1.5.0 +python-jose==3.3.0 +pytz==2022.5 +requests==2.27.1 +retry==0.9.2 +rsa==4.9 +six==1.16.0 +sniffio==1.3.0 +starlette==0.17.1 +typing_extensions==4.4.0 +urllib3==1.26.12 +uvicorn==0.16.0 +yarl==1.8.1 diff --git a/setup.py b/setup.py new file mode 100644 index 00000000..a8fe34d6 --- /dev/null +++ b/setup.py @@ -0,0 +1,12 @@ +from setuptools import setup, find_packages + +setup( + name='mobi_router', + version='1.0', + author='mobigen', + author_email='cbccbs@mobigen.co.kr', + python_requires='>=3.6', + + packages=find_packages(exclude=['docs', 'tests*', '__pycache__/']), + # packages=['ConnectManager'] +)