rorshi commited on
Commit
6c5b86d
·
1 Parent(s): 73db488

Complete step 3 and test web play (deepseek ver)

Browse files
app.py CHANGED
@@ -8,8 +8,8 @@ def create_app():
8
  app = Flask(__name__) # static/template 경로를 기본값
9
 
10
  # ๊ฐ ํ”„๋กœ์ ํŠธ๋Š” Blueprint์—์„œ ์ž๊ธฐ static/template ๊ด€๋ฆฌ
11
- app.register_blueprint(npc_bp, url_prefix="/npc")
12
- # app.register_blueprint(stock_bp, url_prefix="/stock")
13
 
14
  @app.route("/")
15
  def index():
 
8
  app = Flask(__name__) # static/template 경로를 기본값
9
 
10
  # ๊ฐ ํ”„๋กœ์ ํŠธ๋Š” Blueprint์—์„œ ์ž๊ธฐ static/template ๊ด€๋ฆฌ
11
+ app.register_blueprint(npc_bp)
12
+ # app.register_blueprint(stock_bp)
13
 
14
  @app.route("/")
15
  def index():
npc_social_network/models/deepseek_setup.py CHANGED
@@ -1,13 +1,19 @@
1
  from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
 
 
2
 
3
  def load_deepseek():
4
- model_id = "deepseek-ai/deepseek-llm-7b-instruct"
5
 
6
- tokenizer = AutoTokenizer.from_pretrained(model_id)
 
 
 
7
  model = AutoModelForCausalLM.from_pretrained(
8
  model_id,
9
  device_map="auto", # GPU 자동 할당
10
- torch_dtype="auto"
 
11
  )
12
 
13
  pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
 
1
  from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
2
+ from dotenv import load_dotenv
3
+ import os
4
 
5
  def load_deepseek():
6
+ load_dotenv() # .env 파일에서 환경 변수 로드
7
 
8
+ model_id = "deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B" # 모델 성능이 영 별로네(한글로 답변도 못 해주고 답변도 자연스럽지 못함)
9
+ access_token = os.getenv("deepseek_HF_token")
10
+
11
+ tokenizer = AutoTokenizer.from_pretrained(model_id, token=access_token)
12
  model = AutoModelForCausalLM.from_pretrained(
13
  model_id,
14
  device_map="auto", # GPU 자동 할당
15
+ torch_dtype="auto",
16
+ token=access_token
17
  )
18
 
19
  pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
npc_social_network/routes/npc_route.py CHANGED
@@ -40,17 +40,8 @@ def chat():
40
 
41
  ์œ„์˜ ์ •๋ณด๋ฅผ ๋ฐ”ํƒ•์œผ๋กœ {npc.name}์œผ๋กœ์„œ ์ž์—ฐ์Šค๋Ÿฝ๊ณ  ์ผ๊ด€์„ฑ ์žˆ๋Š” ๋‹ต๋ณ€์„ ํ•ด์ฃผ์„ธ์š”.
42
  """
43
-
44
- response = openai.ChatCompletion.create(
45
- model="gpt-4o",
46
- messages=[
47
- {"role": "system", "content": "당신은 가상의 캐릭터로서 성격과 기억을 바탕으로 대화를 합니다." }
48
- {"role": "user", "content": prompt}
49
- ],
50
- max_tokens = 150
51
- )
52
-
53
- npc_reply = response.choices[0].message.content.strip()
54
 
55
  # NPC ๊ธฐ์–ต์— ๋Œ€ํ™” ์ถ”๊ฐ€ ๋ฐ ๊ด€๊ณ„ ์ ์ˆ˜ ์—…๋ฐ์ดํŠธ
56
  npc.remember_conversation(user_input, npc_reply)
 
40
 
41
  ์œ„์˜ ์ •๋ณด๋ฅผ ๋ฐ”ํƒ•์œผ๋กœ {npc.name}์œผ๋กœ์„œ ์ž์—ฐ์Šค๋Ÿฝ๊ณ  ์ผ๊ด€์„ฑ ์žˆ๋Š” ๋‹ต๋ณ€์„ ํ•ด์ฃผ์„ธ์š”.
42
  """
43
+ result = llm(prompt, max_new_tokens=150, do_sample=True)[0]["generated_text"]
44
+ npc_reply = result.strip().replace(prompt.strip(), "")
 
 
 
 
 
 
 
 
 
45
 
46
  # NPC ๊ธฐ์–ต์— ๋Œ€ํ™” ์ถ”๊ฐ€ ๋ฐ ๊ด€๊ณ„ ์ ์ˆ˜ ์—…๋ฐ์ดํŠธ
47
  npc.remember_conversation(user_input, npc_reply)
npc_social_network/static/js/npc_chat.js CHANGED
@@ -4,7 +4,7 @@ async function sendMessage() {
4
  const userMessage = document.getElementById("message").value;
5
  const npc = document.getElementById("npc").value;
6
 
7
- const response = await fetch('/chat', {
8
  method: 'POST',
9
  headers: { 'Content-Type': 'application/json' },
10
  body: JSON.stringify({ message: userMessage, npc: npc })
 
4
  const userMessage = document.getElementById("message").value;
5
  const npc = document.getElementById("npc").value;
6
 
7
+ const response = await fetch('/npc_social_network/chat', {
8
  method: 'POST',
9
  headers: { 'Content-Type': 'application/json' },
10
  body: JSON.stringify({ message: userMessage, npc: npc })
npc_social_network/templates/chat.html CHANGED
@@ -1,20 +1,22 @@
1
  <!-- portfolio/npc_social_network/templates/chat.html -->
2
  <!DOCTYPE html>
3
- <html lang="en">
4
- <head>
5
- <meta charset="UTF-8">
6
- <meta name="viewport" content="width=device-width, initial-scale=1.0">
7
- <title>NPC Chat</title>
8
- </head>
9
- <body>
10
- <h1>NPC 소셜 네트워크</h1>
11
- <select id = "npc">
12
- <option>Alice</option>
13
- <option>Bob</option>
14
- <option>Charlie</option>
15
- </select>
16
- <input type="text" id="message" placeholder="메세지를 입력하세요"/>
17
- <button onclick="sendMessage()">보내기</button>
18
- <div id="chatBox"></div>
19
- </body>
 
 
20
  </html>
 
1
  <!-- portfolio/npc_social_network/templates/chat.html -->
2
  <!DOCTYPE html>
3
+ <html lang="en">
4
+ <head>
5
+ <meta charset="UTF-8">
6
+ <meta name="viewport" content="width=device-width, initial-scale=1.0">
7
+ <title>NPC Chat</title>
8
+ </head>
9
+ <body>
10
+ <h1>NPC 소셜 네트워크</h1>
11
+ <select id = "npc">
12
+ <option>Alice</option>
13
+ <option>Bob</option>
14
+ <option>Charlie</option>
15
+ </select>
16
+ <input type="text" id="message" placeholder="메세지를 입력하세요"/>
17
+ <button onclick="sendMessage()">보내기</button>
18
+ <div id="chatBox"></div>
19
+
20
+ <script src="{{ url_for('npc_social.static', filename='js/npc_chat.js') }}"></script>
21
+ </body>
22
  </html>
requirements.txt ADDED
@@ -0,0 +1,66 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ accelerate==1.7.0
2
+ asttokens @ file:///home/conda/feedstock_root/build_artifacts/asttokens_1733250440834/work
3
+ bitsandbytes==0.45.5
4
+ blinker==1.9.0
5
+ certifi==2025.4.26
6
+ charset-normalizer==3.4.2
7
+ click==8.2.0
8
+ colorama==0.4.6
9
+ comm @ file:///home/conda/feedstock_root/build_artifacts/comm_1733502965406/work
10
+ debugpy @ file:///D:/bld/debugpy_1680755734210/work
11
+ decorator @ file:///home/conda/feedstock_root/build_artifacts/decorator_1740384970518/work
12
+ exceptiongroup @ file:///home/conda/feedstock_root/build_artifacts/exceptiongroup_1746947292760/work
13
+ executing @ file:///home/conda/feedstock_root/build_artifacts/executing_1745502089858/work
14
+ filelock==3.18.0
15
+ Flask==3.1.1
16
+ fsspec==2025.5.0
17
+ huggingface-hub==0.32.0
18
+ idna==3.10
19
+ importlib_metadata @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_importlib-metadata_1747934053/work
20
+ ipykernel @ file:///D:/bld/ipykernel_1719845595208/work
21
+ ipython @ file:///D:/bld/bld/rattler-build_ipython_1745672185/work
22
+ ipython_pygments_lexers @ file:///home/conda/feedstock_root/build_artifacts/ipython_pygments_lexers_1737123620466/work
23
+ itsdangerous==2.2.0
24
+ jedi @ file:///home/conda/feedstock_root/build_artifacts/jedi_1733300866624/work
25
+ Jinja2==3.1.6
26
+ jupyter_client @ file:///home/conda/feedstock_root/build_artifacts/jupyter_client_1733440914442/work
27
+ jupyter_core @ file:///D:/bld/jupyter_core_1727163532151/work
28
+ MarkupSafe==3.0.2
29
+ matplotlib-inline @ file:///home/conda/feedstock_root/build_artifacts/matplotlib-inline_1733416936468/work
30
+ mpmath==1.3.0
31
+ nest_asyncio @ file:///home/conda/feedstock_root/build_artifacts/nest-asyncio_1733325553580/work
32
+ networkx==3.4.2
33
+ numpy==2.2.6
34
+ packaging @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_packaging_1745345660/work
35
+ parso @ file:///home/conda/feedstock_root/build_artifacts/parso_1733271261340/work
36
+ pickleshare @ file:///home/conda/feedstock_root/build_artifacts/pickleshare_1733327343728/work
37
+ pillow==11.0.0
38
+ platformdirs @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_platformdirs_1746710438/work
39
+ prompt_toolkit @ file:///home/conda/feedstock_root/build_artifacts/prompt-toolkit_1744724089886/work
40
+ psutil @ file:///D:/bld/psutil_1681775154857/work
41
+ pure_eval @ file:///home/conda/feedstock_root/build_artifacts/pure_eval_1733569405015/work
42
+ Pygments @ file:///home/conda/feedstock_root/build_artifacts/pygments_1736243443484/work
43
+ python-dateutil @ file:///home/conda/feedstock_root/build_artifacts/python-dateutil_1733215673016/work
44
+ python-dotenv==1.1.0
45
+ pywin32==304
46
+ PyYAML==6.0.2
47
+ pyzmq @ file:///D:/bld/pyzmq_1679317063994/work
48
+ regex==2024.11.6
49
+ requests==2.32.3
50
+ safetensors==0.5.3
51
+ six @ file:///home/conda/feedstock_root/build_artifacts/six_1733380938961/work
52
+ stack_data @ file:///home/conda/feedstock_root/build_artifacts/stack_data_1733569443808/work
53
+ sympy==1.14.0
54
+ tokenizers==0.21.1
55
+ torch==2.7.0+cu118
56
+ torchaudio==2.7.0+cu118
57
+ torchvision==0.22.0+cu118
58
+ tornado @ file:///D:/bld/tornado_1681817639987/work
59
+ tqdm==4.67.1
60
+ traitlets @ file:///home/conda/feedstock_root/build_artifacts/traitlets_1733367359838/work
61
+ transformers==4.52.3
62
+ typing_extensions @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_typing_extensions_1744302253/work
63
+ urllib3==2.4.0
64
+ wcwidth @ file:///home/conda/feedstock_root/build_artifacts/wcwidth_1733231326287/work
65
+ Werkzeug==3.1.3
66
+ zipp @ file:///home/conda/feedstock_root/build_artifacts/zipp_1732827521216/work
test.ipynb CHANGED
@@ -2,7 +2,7 @@
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
- "execution_count": null,
6
  "id": "ecf4c973",
7
  "metadata": {},
8
  "outputs": [],
@@ -33,6 +33,41 @@
33
  "# 1. ai 모델 학습을 위해 어떤 내용이 필요한지\n",
34
  "# 2. 학습한 모델을 저장, 활용할 수 있는지"
35
  ]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
36
  }
37
  ],
38
  "metadata": {
@@ -42,7 +77,15 @@
42
  "name": "python3"
43
  },
44
  "language_info": {
 
 
 
 
 
 
45
  "name": "python",
 
 
46
  "version": "3.11.11"
47
  }
48
  },
 
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
+ "execution_count": 2,
6
  "id": "ecf4c973",
7
  "metadata": {},
8
  "outputs": [],
 
33
  "# 1. ai 모델 학습을 위해 어떤 내용이 필요한지\n",
34
  "# 2. 학습한 모델을 저장, 활용할 수 있는지"
35
  ]
36
+ },
37
+ {
38
+ "cell_type": "code",
39
+ "execution_count": 1,
40
+ "id": "7469ad02",
41
+ "metadata": {},
42
+ "outputs": [
43
+ {
44
+ "name": "stdout",
45
+ "output_type": "stream",
46
+ "text": [
47
+ "True\n",
48
+ "1\n",
49
+ "NVIDIA GeForce GTX 1070 Ti\n"
50
+ ]
51
+ }
52
+ ],
53
+ "source": [
54
+ "# 1. GPU 사용 여부 확인\n",
55
+ "import torch\n",
56
+ "print(torch.cuda.is_available()) # 👉 True면 GPU 사용 가능\n",
57
+ "print(torch.cuda.device_count()) # 👉 연결된 GPU 수\n",
58
+ "print(torch.cuda.get_device_name(0)) # 👉 GPU 이름"
59
+ ]
60
+ },
61
+ {
62
+ "cell_type": "code",
63
+ "execution_count": null,
64
+ "id": "b055896e",
65
+ "metadata": {},
66
+ "outputs": [],
67
+ "source": [
68
+ "# GPU 지원 버전으로 다시 설치\n",
69
+ "# !pip install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cu118"
70
+ ]
71
  }
72
  ],
73
  "metadata": {
 
77
  "name": "python3"
78
  },
79
  "language_info": {
80
+ "codemirror_mode": {
81
+ "name": "ipython",
82
+ "version": 3
83
+ },
84
+ "file_extension": ".py",
85
+ "mimetype": "text/x-python",
86
  "name": "python",
87
+ "nbconvert_exporter": "python",
88
+ "pygments_lexer": "ipython3",
89
  "version": "3.11.11"
90
  }
91
  },