ํฐ์คํ ๋ฆฌ ๋ทฐ
๐งฏ ์ค์ ์ด์ ๋ชจ๋ – ์ค๋ฅ ๊ฐ์ง, ์ฌ๋ ์๋ฆผ, ๋ฐฑ์ ์ ๋ต (์ด์ ์๋ํ Ops ์์ ์ฒด)
octo54 2025. 10. 23. 17:19๐งฏ ์ค์ ์ด์ ๋ชจ๋ – ์ค๋ฅ ๊ฐ์ง, ์ฌ๋ ์๋ฆผ, ๋ฐฑ์ ์ ๋ต (์ด์ ์๋ํ Ops ์์ ์ฒด)
๊ฐ๋ฐ·๋ฐฑํ
์คํธ·์๋๋งค๋งค๊น์ง ๋ค ๋ง๋ค์๋๋ฐ… ์ด์ ์ค์ ํ๋ฃจ ๋ฉ์ถ๋ฉด ์ฑ๊ณผ๊ฐ ์ ๋ถ ํ๋ค๋ฆฝ๋๋ค.
์ด๋ฒ ํธ์ ์ง์ง ํ๋์ฒ๋ผ **์์ ์ ์ผ๋ก “๋ ๋ฒ๋ ์์คํ ”**์ ๋ง๋ค๊ธฐ ์ํ ์ด์ ์๋ํ(Ops) ๋ชจ๋์ ๋ถ์ ๋๋ค.
- ์ค๋ฅ๋ฅผ ์ฆ์ ๊ฐ์งํ๊ณ
- ์ฌ๋์ผ๋ก ์๋ฆผ์ ์๊ณ
- DB์ ๋ก๊ทธ๋ฅผ ์๋ ๋ฐฑ์ ํ๊ณ
- ์๋น์ค ํฌ์ค์ฒดํฌ/์ฌ์์๊น์ง
์๋ ์ฝ๋๋ ์ ๊ฐ ์ง์ ๊ฒ์ฆํ ๋จ์ผ ํ์ด์ฌ ์คํ ์คํฌ๋ฆฝํธ์ ๋๋ค. ๊ทธ๋๋ก ๋ถ์ฌ ๋ฃ๊ณ .env๋ง ์ฑ์๋ ๋์๊ฐ๊ฒ ์ค๊ณํ์ต๋๋ค.
๊ตฌ์ฑ ํ๋์
[์ฑ ๋ก์ง(๋ฆฌ๋ฐธ๋ฐ์ฑ/์ฃผ๋ฌธ)]
โโ ๊ตฌ์กฐ์ ๋ก๊น (JSON)
โโ ์์ธ ๊ฐ์ง → Slack Alert (+ ์ฌ์๋ ๋ฐฑ์คํ)
โโ ํฌ์ค ์ฒดํฌ /health
โโ ์ผ์ผ ์๋ ๋ฐฑ์ (DB/๋ก๊ทธ ์ค๋ ์ท ํ์ ๋ณด๊ด)
0) ํ๊ฒฝ ๋ณ์(.env) ์์
# .env
ENV=prod
SLACK_WEBHOOK_URL=https://hooks.slack.com/services/XXX/YYY/ZZZ
DB_URL=sqlite:///quant.db
BACKUP_DIR=./backups
RETENTION_DAYS=14
HEALTH_PORT=8080
โ ๏ธ ์ ๋ ์ฝ๋์ ๋น๋ฐ์ ํ๋์ฝ๋ฉํ์ง ๋ง์ธ์. .env + ํ๊ฒฝ๋ณ์๋ก๋ง ์ฃผ์ !
1) ์ด์ ์๋ํ ํตํฉ ์คํฌ๋ฆฝํธ (์ด์์ฉ ๋ฉ์ธ)
์๋ ํ๋๋ก ๋ก๊น + ์๋ฆผ + ์ฌ์๋ + ๋ฐฑ์ + ํฌ์ค์ฒดํฌ + ์ค์ผ์ค๋ฌ๋ฅผ ๋ฌถ์์ต๋๋ค.
import os, json, time, threading, shutil, zipfile, logging, datetime as dt
from http.server import BaseHTTPRequestHandler, HTTPServer
from logging.handlers import RotatingFileHandler
from pathlib import Path
import pandas as pd
import yfinance as yf
from dotenv import load_dotenv
import requests
# ========= Configuration =========
# All secrets/settings come from the environment (.env); nothing is hard-coded.
load_dotenv()
ENV = os.getenv("ENV", "dev")
SLACK_WEBHOOK_URL = os.getenv("SLACK_WEBHOOK_URL", "")
DB_URL = os.getenv("DB_URL", "sqlite:///quant.db")
BACKUP_DIR = Path(os.getenv("BACKUP_DIR", "./backups"))
RETENTION_DAYS = int(os.getenv("RETENTION_DAYS", "14"))
HEALTH_PORT = int(os.getenv("HEALTH_PORT", "8080"))
TICKERS = ["005930.KS", "000660.KS", "035420.KS"]  # example tickers

# ========= Logging (structured JSON + rotating file) =========
# NOTE: the original section-header comment was split across two lines by the
# blog's line wrapping, which made it a SyntaxError; rejoined here.
LOG_DIR = Path("./logs")
LOG_DIR.mkdir(exist_ok=True)
log_path = LOG_DIR / "app.log"
class JsonFormatter(logging.Formatter):
    """Render each log record as one JSON line: UTC timestamp, level, message, logger name."""

    def format(self, record):
        payload = {
            # Timezone-aware replacement for the deprecated datetime.utcnow();
            # normalize the "+00:00" offset to the conventional "Z" suffix so
            # the emitted timestamp format is unchanged.
            "ts": dt.datetime.now(dt.timezone.utc).isoformat().replace("+00:00", "Z"),
            "level": record.levelname,
            "msg": record.getMessage(),
            "name": record.name,
        }
        if record.exc_info:
            # Attach the formatted traceback when the record carries one.
            payload["exc_info"] = self.formatException(record.exc_info)
        return json.dumps(payload, ensure_ascii=False)
logger = logging.getLogger("quant.ops")
logger.setLevel(logging.INFO)

# File handler: rotate at ~5 MB, keep 5 archives, JSON lines.
_file_handler = RotatingFileHandler(
    log_path, maxBytes=5_000_000, backupCount=5, encoding="utf-8"
)
_file_handler.setFormatter(JsonFormatter())
logger.addHandler(_file_handler)

# Mirror the same JSON output to the console as well.
_console_handler = logging.StreamHandler()
_console_handler.setFormatter(JsonFormatter())
logger.addHandler(_console_handler)


def log_info(msg, **kv):
    """Log an INFO-level structured message; extra keyword fields go into the JSON payload."""
    logger.info(json.dumps({"msg": msg, **kv}, ensure_ascii=False))


def log_err(msg, **kv):
    """Log an ERROR-level structured message; extra keyword fields go into the JSON payload."""
    logger.error(json.dumps({"msg": msg, **kv}, ensure_ascii=False))
# ========= ์ฌ๋ ์๋ฆผ =========
def slack_alert(text: str, level: str = "info", blocks=None):
if not SLACK_WEBHOOK_URL:
log_info("Slack webhook not set; printing instead", text=text)
return
color = "#2eb886" if level=="info" else "#e01e5a"
payload = {
"attachments": [{
"color": color,
"mrkdwn_in": ["text"],
"text": f"*[{ENV}] {text}*",
}]
}
if blocks:
payload["blocks"] = blocks
try:
r = requests.post(SLACK_WEBHOOK_URL, json=payload, timeout=5)
r.raise_for_status()
except Exception as e:
log_err("Slack alert failed", error=str(e))
# ========= ๊ฐ๋จ ๋ฐฑ์คํ ์ฌ์๋ =========
def with_retry(fn, *, tries=3, base_delay=2.0, factor=2.0, on_fail_msg=""):
def _inner(*args, **kwargs):
delay = base_delay
for i in range(1, tries+1):
try:
return fn(*args, **kwargs)
except Exception as e:
log_err("op failed; retrying", try_idx=i, error=str(e), fn=fn.__name__)
if i == tries:
slack_alert(f"๐จ ์คํจ: {on_fail_msg or fn.__name__} ({e})", level="error")
raise
time.sleep(delay)
delay *= factor
return _inner
# ========= Core task: data refresh & ranking =========
@with_retry
def collect_factor_data():
    """Download 1y of prices per ticker, compute momentum/volatility factors,
    and persist the ranked table to data/factors_latest.csv."""
    records = []
    for ticker in TICKERS:
        history = yf.download(ticker, period="1y", progress=False)
        if history.empty or len(history) < 120:
            raise RuntimeError(f"Not enough data for {ticker}")
        closes = history["Close"]
        six_month_return = (closes.iloc[-1] / closes.iloc[-120]) - 1  # ~120 trading days back
        vol_60d = closes.pct_change().rolling(60).std().iloc[-1]
        records.append({"ticker": ticker, "ret6m": float(six_month_return), "vol60": float(vol_60d)})
    # Rank: highest 6-month return first, lowest volatility as tiebreaker.
    ranked = pd.DataFrame(records).sort_values(["ret6m", "vol60"], ascending=[False, True])
    out_dir = Path("data")
    out_dir.mkdir(exist_ok=True)
    ranked.to_csv(out_dir / "factors_latest.csv", index=False, encoding="utf-8")
    log_info("factor data refreshed", rows=len(records))
    return ranked
@with_retry
def select_and_emit_orders(top_k=2):
    """Pick the top_k ranked tickers and emit an order-candidate CSV.

    Dry-run mode: instead of calling a real broker API, orders are written
    to a timestamped file under data/ and announced on Slack.
    """
    fact = pd.read_csv("data/factors_latest.csv")
    picks = fact.head(top_k).copy()
    picks["qty"] = 5  # fixed example quantity
    ts = dt.datetime.now().strftime("%Y%m%d_%H%M%S")
    orders_path = Path("data") / f"orders_{ts}.csv"
    picks[["ticker", "qty"]].to_csv(orders_path, index=False, encoding="utf-8")
    log_info("orders emitted", path=str(orders_path), tickers=",".join(picks["ticker"].tolist()))
    # NOTE: this f-string was broken across two lines by blog line-wrapping
    # (a SyntaxError as published); rejoined onto one line here.
    slack_alert(f"โ ์ฃผ๋ฌธ ํ๋ณด ์ฐ์ถ: {', '.join(picks['ticker'].tolist())}")
    return orders_path
# ========= ๋ฐฑ์
(์ค๋
์ท + ๋ณด๊ด ๊ธฐ๊ฐ ํ์ ) =========
def _zipdir(src_dir: Path, zf: zipfile.ZipFile):
for p in src_dir.rglob("*"):
if p.is_file():
zf.write(p, arcname=p.relative_to(src_dir))
def rotate_backups():
    """Zip data/, logs/ and the SQLite DB into a timestamped snapshot, then
    delete snapshots older than RETENTION_DAYS.

    Returns the path of the archive just written.
    """
    BACKUP_DIR.mkdir(parents=True, exist_ok=True)  # parents=True: tolerate nested paths
    stamp = dt.datetime.now().strftime("%Y%m%d_%H%M%S")
    # Snapshot of data / logs / DB (file-based DBs only).
    archive = BACKUP_DIR / f"snapshot_{stamp}.zip"
    with zipfile.ZipFile(archive, "w", compression=zipfile.ZIP_DEFLATED) as zf:
        for d in [Path("data"), Path("logs")]:
            if d.exists():
                _zipdir(d, zf)
        # Archive the raw SQLite file when DB_URL points at one.
        if DB_URL.startswith("sqlite:///"):
            db_path = Path(DB_URL.replace("sqlite:///", "", 1))
            if db_path.exists():
                zf.write(db_path, arcname=f"db/{db_path.name}")
    # Delete backups past the retention window.
    cutoff = dt.datetime.now() - dt.timedelta(days=RETENTION_DAYS)
    for f in BACKUP_DIR.glob("snapshot_*.zip"):
        # BUG FIX: the original used f.name.split("_")[-1], which keeps only
        # the trailing "HHMMSS.zip" segment, so strptime("%Y%m%d_%H%M%S")
        # always raised and no backup was ever deleted. Strip the fixed
        # prefix/suffix to recover the full timestamp instead.
        ts = f.name[len("snapshot_"):-len(".zip")]
        try:
            if dt.datetime.strptime(ts, "%Y%m%d_%H%M%S") < cutoff:
                f.unlink()
        except ValueError:
            # Ignore files whose name doesn't parse as a timestamp
            # (narrowed from a bare except:, which also hid OS errors).
            pass
    log_info("backup rotated", archive=str(archive))
    return archive
# ========= Health check (HTTP) =========
# Last successful run per task; None until the task has succeeded once.
LAST_OK = {"collect": None, "orders": None, "backup": None}


class HealthHandler(BaseHTTPRequestHandler):
    """Minimal /health endpoint: 200 once every task has succeeded at least once, else 503."""

    def do_GET(self):
        if not self.path.startswith("/health"):
            self.send_response(404)
            self.end_headers()
            return
        healthy = all(LAST_OK.values())  # has every task succeeded at least once?
        body = {
            "env": ENV,
            "ok": healthy,
            "last_ok": {
                name: (when.isoformat() + "Z" if when else None)
                for name, when in LAST_OK.items()
            },
        }
        self.send_response(200 if healthy else 503)
        self.send_header("Content-Type", "application/json; charset=utf-8")
        self.end_headers()
        self.wfile.write(json.dumps(body).encode("utf-8"))
def _run_health_server():
    """Serve the /health endpoint on 0.0.0.0:HEALTH_PORT; blocks forever."""
    server = HTTPServer(("0.0.0.0", HEALTH_PORT), HealthHandler)
    logger.info(json.dumps({"msg": "health server start", "port": HEALTH_PORT}))
    server.serve_forever()
# ========= ์ค์ผ์ค ๋ฃจํ =========
def scheduler_loop():
"""์์ฃผ ๋จ์ํ ์ค์ผ์ค๋ฌ (๋ถ ๋จ์ ํด๋ง). ์ด์์์ APScheduler/cron ๊ถ์ฅ."""
while True:
now = dt.datetime.now()
try:
# 08:55 ๋ฐ์ดํฐ ๊ฐฑ์ , 09:01 ์ฃผ๋ฌธ ์ฐ์ถ, 18:30 ๋ฐฑ์
hhmm = now.strftime("%H:%M")
if hhmm == "08:55":
collect_factor_data()
LAST_OK["collect"] = dt.datetime.utcnow()
if hhmm == "09:01":
select_and_emit_orders()
LAST_OK["orders"] = dt.datetime.utcnow()
if hhmm == "18:30":
rotate_backups()
LAST_OK["backup"] = dt.datetime.utcnow()
except Exception as e:
log_err("scheduler task error", error=str(e))
time.sleep(30) # 30์ด ๊ฐ๊ฒฉ ํด๋ง
if __name__ == "__main__":
    # Run every task once at startup so /health can go green immediately.
    try:
        collect_factor_data()
        LAST_OK["collect"] = dt.datetime.utcnow()
        select_and_emit_orders()
        LAST_OK["orders"] = dt.datetime.utcnow()
        rotate_backups()
        LAST_OK["backup"] = dt.datetime.utcnow()
        slack_alert("๐ ์ด์ ๋ด ์์ (Ops stack up)")
    except Exception as init_e:
        log_err("init failure", error=str(init_e))
        slack_alert(f"๐จ ์ด๊ธฐํ ์คํจ: {init_e}", level="error")

    # Health endpoint on a daemon thread so it dies with the main process.
    health_thread = threading.Thread(target=_run_health_server, daemon=True)
    health_thread.start()

    # Blocking scheduler loop on the main thread.
    scheduler_loop()
์ฝ๋ ์ค๋ช ํฌ์ธํธ
- ๊ตฌ์กฐ์ JSON ๋ก๊น : RotatingFileHandler๋ก ์ฉ๋ ์ ํ + ํ์ , ๋ถ์ ๋๊ตฌ๋ก ์ฝ๊ฒ ํ์ฑ ๊ฐ๋ฅ
- Slack ์๋ฆผ: ์คํจ ์ ๋ฐ๋ก ๋ฉ์ . ๋คํธ์ํฌ ์คํจ๋ ๋ก๊น ํ๊ณ ๋ฌด์(์๋น์ค ์ง์)
- ์ฌ์๋ ๋ฐฑ์คํ: API/๋คํธ์ํฌ ํ๋ค๋ ค๋ ์๋ ๋ณต๊ตฌ
- ๋ฐฑ์ ํ์ : RETENTION_DAYS ์ด์ ์ค๋ ์ท ์๋ ์ญ์
- ํฌ์ค ์ฒดํฌ: /health → ํ๋ก๋ฉํ ์ฐ์ค·์ ํ์๋ก๋ด·ํด๋ผ์ฐ๋ ๋ชจ๋ํฐ์ ์ฐ๊ฒฐ ์ฉ์ด
- ์ค์ผ์ค๋ง: ์ต์ ์์กด ๋ฒ์ (ํด๋ง). ์ค์ ๋ฐฐํฌ ๋ APScheduler/cron์ผ๋ก ๊ต์ฒด ์ถ์ฒ
2) ์ฅ์ ์ ์ด์ ๋ณต๊ตฌ ์๋๋ฆฌ์ค
์ํฉ ์๋ ๋์ ์๋ ์ ๊ฒ ํฌ์ธํธ
| ๋ฐ์ดํฐ ์์ง ์คํจ | ์ฌ์๋(๋ฐฑ์คํ) + ์ฌ๋ ์๋ฆผ | yfinance/๋คํธ์ํฌ ์ํ, ํฐ์ปค/๊ธฐ๊ฐ ์ ํจ์ฑ |
| ์ฃผ๋ฌธ ํ๋ณด ์ฐ์ถ ์คํจ | ์ฌ์๋ + ์ฌ๋ | ์ ๋ ฅ CSV/ํด๋ ๊ถํ |
| ๋์คํฌ ๋ถ์กฑ | ๋ฐฑ์ ํ์ + ๋ก๊ทธ ํ์ | ์๋ฒ ์ฉ๋/๋ก๊ทธ ๋ ๋ฒจ ์กฐ์ |
| ์ฑ ๋ค์ด | ํ๋ก์ธ์ค ๋งค๋์ ๋ก ์ฌ๊ธฐ๋ | systemd/Docker ์ฌ์์ ์ ์ฑ ํ์ธ |
3) Docker & ์ฌ์์ ์ ์ฑ (์ต์ )
# Dockerfile
FROM python:3.11-slim
WORKDIR /app
# Copy requirements first so the dependency layer stays cached across code changes.
COPY requirements.txt ./
# --no-cache-dir keeps pip's download cache out of the image layer (smaller image).
RUN pip install --no-cache-dir -r requirements.txt
COPY . .
CMD ["python", "ops_main.py"]
# docker-compose.yml
services:
quant-ops:
build: .
env_file: .env
restart: unless-stopped
ports:
- "8080:8080" # health
volumes:
- ./data:/app/data
- ./logs:/app/logs
- ./backups:/app/backups
์ปจํ ์ด๋๊ฐ ์ฃฝ์ด๋ restart: unless-stopped๋ก ์๋ ์ฌ๊ธฐ๋.
4) ๋ณด์·๊ฐ์ฉ์ฑ ์ฒดํฌ๋ฆฌ์คํธ
- ๐ ๋น๋ฐ ๊ด๋ฆฌ: .env๋ง ์ฌ์ฉ, ์ ์ฅ์ ์ปค๋ฐ ๊ธ์ง
- ๐งช ๋๋ผ์ด๋ฐ ๋ชจ๋: ์ฃผ๋ฌธ ์คํ ์ “ํ์ผ ์ถ๋ ฅ ๋ชจ๋”๋ก ๊ฒ์ฆ → ๋์ค์ ์ค์ API ๊ต์ฒด
- ๐งญ ๊ด์ธก์ฑ: ์ฌ๋ ์๋ฆผ + /health + ๊ตฌ์กฐ์ ๋ก๊ทธ
- ๐๏ธ ๋ฐฑ์ ์์น: DB/๋ก๊ทธ/๋ฆฌํฌํธ ์ต์ 14์ผ ๋ณด๊ด(๊ท๋ชจ์ ๋ฐ๋ผ ์กฐ์ )
- ๐งฐ ์ฅ์ ๋ฆฌํ์ค: ๋คํธ์ํฌ ๋๊น/๋์คํฌ Full/๋ฐ์ดํฐ ๋๋ฝ ์ํฉ ์ฃผ๊ธฐ ์ ๊ฒ
5) ๋ค์ ๋จ๊ณ(์ ํ)
- APScheduler๋ก ์ ํํ ํฌ๋ก ์ค์ผ์ค ์ ์ฉ
- ์ฌ๋์ ์ฃผ๋ฌธ/์ฑ๊ณผ ์์ฝ ์นด๋(blocks) ์์๊ฒ ์ถ๋ ฅ
- S3/Drive ์ ๋ก๋๋ก ์คํ์ฌ์ดํธ ๋ฐฑ์
- ํ๋ก๋ฉํ ์ฐ์ค/๊ทธ๋ผํ๋ ์ฐ๋์ผ๋ก ๋ฉํธ๋ฆญ ๊ธฐ๋ฐ ๋ชจ๋ํฐ๋ง
์์ฝ
- ๋ฌธ์ ๋ ์ธ์ ๊ฐ ๋ฐ๋์ ๋ฐ์ํฉ๋๋ค.
- ๋ฐ์ ์ฆ์ ๊ฐ์ง·์๋ฆผ·๋ฐฑ์ ·๋ณต๊ตฌ ๋ฃจํด์ด ์์ผ๋ฉด ๋ ๋ฒ๋ ์๊ฐ์ ๊ณ์ ํ๋ฆ ๋๋ค.
- ์ค๋ ์คํฌ๋ฆฝํธ ํ๋๋ก ์ด์์ 80%๋ ์๋ํ๋ฉ๋๋ค. ๋๋จธ์ง 20%๋ ์ ์ง์ ๊ณ ๋ํ!
ํํธ์ด์์๋ํ,์ฌ๋์๋ฆผ,๋ฐฑ์ ์ ๋ต,ํฌ์ค์ฒดํฌ,์ด์๋ชจ๋ํฐ๋ง,ํํธ๋ด,ํ์ด์ฌ๋ก๊น ,์๋๋ฆฌ๋ฐธ๋ฐ์ฑ,DB์ค๋ ์ท,์ฌ์๋๋ฐฑ์คํ
'์ฃผ์' ์นดํ ๊ณ ๋ฆฌ์ ๋ค๋ฅธ ๊ธ
- Total
- Today
- Yesterday
- node.js
- Docker
- JWT
- seo ์ต์ ํ 10๊ฐ
- ai์ฒ ํ
- ๋ฐฑ์๋๊ฐ๋ฐ
- ์ฟ ๋ฒ๋คํฐ์ค
- ๋ฅ๋ฌ๋
- ๊ฐ๋ฐ๋ธ๋ก๊ทธ
- CI/CD
- kotlin
- JAX
- NestJS
- Express
- DevOps
- ์น๊ฐ๋ฐ
- Python
- fastapi
- Redis
- PostgreSQL
- Prisma
- nextJS
- llm
- flax
- REACT
- ์๋ฐ๋ฉด์
- rag
- ํ๋ก ํธ์๋๊ฐ๋ฐ
- SEO์ต์ ํ
- Next.js
| ์ผ | ์ | ํ | ์ | ๋ชฉ | ๊ธ | ํ |
|---|---|---|---|---|---|---|
| 1 | 2 | 3 | 4 | 5 | 6 | 7 |
| 8 | 9 | 10 | 11 | 12 | 13 | 14 |
| 15 | 16 | 17 | 18 | 19 | 20 | 21 |
| 22 | 23 | 24 | 25 | 26 | 27 | 28 |

