drkareemkamal committed
Commit e498dfc · verified · 1 Parent(s): 1862bd6

Update app.py

Files changed (1)
  1. app.py +59 -59
app.py CHANGED
@@ -1,59 +1,59 @@
- import streamlit as st
- from langchain.prompts import PromptTemplate
- from langchain.llms import CTransformers
-
- # functio to get response from LLAMA 2 model
-
- def get_llama_response(input_text,no_words,blog_style):
-
-     ### LLama 2 model
-     llm = CTransformers(model = 'models\llama-2-7b-chat.ggmlv3.q8_0.bin',
-                         model_type = 'llama',
-                         config = {'max_new_tokens': 256,
-                                   'temperature': 0.01})
-
-
-     ## Prompt Template
-     template = """
-     write a blog for {blog_style} job profile for a topic {input_text}
-     within {no_words} words
-     """
-
-     prompt = PromptTemplate(input_vairables =['blog_style','input_text','no_words'],
-                             template = template)
-
-     ## Generate the response from LLMA 2 model
-
-     response = llm(prompt.format(blog_style=blog_style , input_text = input_text , no_words = no_words))
-     print(response)
-     return response
-
-
-
- st.set_page_config(page_title = 'Generate Blogs',
-                    page_icon = '',
-                    layout = 'centered',
-                    initial_sidebar_state = 'collapsed')
-
- st.header('Generate Blogs ')
-
- input_text = st.text_input('Enter the blog Topic')
-
- ## creating two more columns additional 2 fields
-
- col1 , col2 = st.columns([5,5])
-
- with col1 :
-     no_words = st.text_input('No. of words ')
-
-
- with col2 :
-     blog_style = st.selectbox('Wiriting the blog for ',
-                               ('Researchers','Data Scientist','Common People'),index=0)
-
- submit = st.button('Generate')
-
- ## final response
-
- if submit :
-     st.write(get_llama_response(input_text,no_words,blog_style))
 
+ import streamlit as st
+ from langchain.prompts import PromptTemplate
+ from langchain.llms import CTransformers
+
+ # functio to get response from LLAMA 2 model
+
+ def get_llama_response(input_text,no_words,blog_style):
+
+     ### LLama 2 model
+     llm = CTransformers(model = 'TheBloke/Llama-2-7B-Chat-GGML',
+                         model_type = 'llama',
+                         config = {'max_new_tokens': 256,
+                                   'temperature': 0.01})
+
+
+     ## Prompt Template
+     template = """
+     write a blog for {blog_style} job profile for a topic {input_text}
+     within {no_words} words
+     """
+
+     prompt = PromptTemplate(input_vairables =['blog_style','input_text','no_words'],
+                             template = template)
+
+     ## Generate the response from LLMA 2 model
+
+     response = llm(prompt.format(blog_style=blog_style , input_text = input_text , no_words = no_words))
+     print(response)
+     return response
+
+
+
+ st.set_page_config(page_title = 'Generate Blogs',
+                    page_icon = '',
+                    layout = 'centered',
+                    initial_sidebar_state = 'collapsed')
+
+ st.header('Generate Blogs ')
+
+ input_text = st.text_input('Enter the blog Topic')
+
+ ## creating two more columns additional 2 fields
+
+ col1 , col2 = st.columns([5,5])
+
+ with col1 :
+     no_words = st.text_input('No. of words ')
+
+
+ with col2 :
+     blog_style = st.selectbox('Wiriting the blog for ',
+                               ('Researchers','Data Scientist','Common People'),index=0)
+
+ submit = st.button('Generate')
+
+ ## final response
+
+ if submit :
+     st.write(get_llama_response(input_text,no_words,blog_style))
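The only functional change in this commit is the model argument passed to CTransformers: the old version loads a local GGML file (models\llama-2-7b-chat.ggmlv3.q8_0.bin, a Windows-style backslash path that would not resolve on a Linux host), while the new version points at the Hugging Face repo TheBloke/Llama-2-7B-Chat-GGML so ctransformers can fetch the weights itself. A few pre-existing issues are left untouched by the commit, most notably the misspelled keyword input_vairables (LangChain's PromptTemplate expects input_variables) and the comment/label typos. For reference, a lightly cleaned-up sketch of the new app.py is shown below. It is not part of the commit; it fixes the keyword, tidies the typos, and pins a specific quantised file via model_file, where the file name is taken from the old local path and is assumed to exist in that Hub repo.

import streamlit as st
from langchain.prompts import PromptTemplate
from langchain.llms import CTransformers


def get_llama_response(input_text, no_words, blog_style):
    """Generate a blog post with the Llama 2 chat model."""
    # Llama 2 model pulled from the Hugging Face Hub by ctransformers.
    # The model_file value mirrors the old local path in this commit and is
    # assumed to be present in the TheBloke/Llama-2-7B-Chat-GGML repo.
    llm = CTransformers(model='TheBloke/Llama-2-7B-Chat-GGML',
                        model_file='llama-2-7b-chat.ggmlv3.q8_0.bin',
                        model_type='llama',
                        config={'max_new_tokens': 256, 'temperature': 0.01})

    # Prompt template (note the corrected input_variables keyword)
    template = """
    Write a blog for a {blog_style} job profile on the topic {input_text}
    within {no_words} words.
    """
    prompt = PromptTemplate(input_variables=['blog_style', 'input_text', 'no_words'],
                            template=template)

    # Format the prompt and generate the response
    response = llm(prompt.format(blog_style=blog_style,
                                 input_text=input_text,
                                 no_words=no_words))
    return response


st.set_page_config(page_title='Generate Blogs',
                   layout='centered',
                   initial_sidebar_state='collapsed')

st.header('Generate Blogs')

input_text = st.text_input('Enter the blog topic')

# Two additional input fields side by side
col1, col2 = st.columns([5, 5])
with col1:
    no_words = st.text_input('No. of words')
with col2:
    blog_style = st.selectbox('Writing the blog for',
                              ('Researchers', 'Data Scientist', 'Common People'),
                              index=0)

submit = st.button('Generate')

# Final response
if submit:
    st.write(get_llama_response(input_text, no_words, blog_style))

To try it locally, install streamlit, langchain and ctransformers, then run "streamlit run app.py"; the first generation triggers the GGML weight download, so it can take a few minutes.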