# NOTE(review): the six lines below are web-page extraction residue from the
# Hugging Face Spaces file viewer (status badge, file size, commit hashes,
# column ruler) — not part of the program. Commented out so the module parses;
# original text preserved verbatim for reference.
# Spaces:
# Running
# Running
# File size: 2,036 Bytes
# fa85a62 2a62deb 12ea265 fa85a62 2a62deb e86a500 ada104f de4f577 7e00b28 2a62deb fa85a62 2a62deb e86a500 2a62deb e5ce70d e86a500 7e00b28 2a62deb fa85a62 de4f577 7e00b28 |
# 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 |
import streamlit as st
import os
import pandas as pd
from dotenv import load_dotenv
# --- Page setup ---------------------------------------------------------
# IMPORTANT: set_page_config must be the first Streamlit command called
st.set_page_config(
page_title="Reddit Scraper",
page_icon="π",
layout="wide",
initial_sidebar_state="expanded"
)
# NOTE(review): "π" above looks like a mojibake'd emoji (mis-encoded during
# extraction) — confirm the intended page icon against the original source.
# Disable static file serving to prevent the static folder warning
# This configuration is set using environment variables instead of directly accessing server settings
# NOTE(review): Streamlit reads its config when the module is imported; setting
# this env var *after* `import streamlit` (L7) presumably has no effect — it
# would need to be set before the import (or in .streamlit/config.toml). TODO confirm.
os.environ['STREAMLIT_SERVER_ENABLE_STATIC_SERVING'] = 'false'
# Session state initialization is now handled in advanced_scraper_ui.py
# Load environment variables from a local .env file, if present
load_dotenv()
# Custom CSS for the app: header sizing, the Reddit-orange subheader, bordered
# "card" containers, muted small text, and full-width buttons.
_CUSTOM_CSS = """
<style>
.main-header {
font-size: 2.5rem;
margin-bottom: 1rem;
}
.subheader {
font-size: 1.5rem;
color: #ff4500;
margin-bottom: 1rem;
}
.card {
padding: 1rem;
border-radius: 0.5rem;
margin-bottom: 1rem;
border: 1px solid #ddd;
}
.small-text {
font-size: 0.8rem;
color: #777;
}
.stButton button {
width: 100%;
}
</style>
"""

# unsafe_allow_html is required for raw <style> injection via st.markdown.
st.markdown(_CUSTOM_CSS, unsafe_allow_html=True)
# Load Reddit API credentials from Streamlit secrets (e.g. Hugging Face Spaces /
# Streamlit Cloud) and export them as environment variables so the scraper
# modules can read them without depending on `st.secrets` directly.
try:
    # Read every secret first: if `st.secrets` raises (no secrets file is
    # configured), no environment variable gets touched.
    _secret_env = {
        "REDDIT_CLIENT_ID": st.secrets.get("REDDIT_CLIENT_ID", ""),
        "REDDIT_CLIENT_SECRET": st.secrets.get("REDDIT_CLIENT_SECRET", ""),
        # The user agent has a non-empty default, so it is always exported.
        "REDDIT_USER_AGENT": st.secrets.get("REDDIT_USER_AGENT", "RedditScraperApp/1.0"),
    }
    for _key, _value in _secret_env.items():
        # Skip empty values so user-supplied credentials can fill them later.
        if _value:
            os.environ[_key] = _value
except Exception:
    # Deliberately broad: accessing `st.secrets` raises different exception
    # types across Streamlit versions when no secrets are configured.
    # Fall back to credentials entered by the user in the UI.
    pass
# Now that setup (page config, CSS, credentials) is complete, import the app
# modules. These imports are deferred to here on purpose: importing them
# earlier could trigger Streamlit commands before set_page_config runs.
# NOTE(review): EnhancedRedditScraper is not referenced in this file —
# presumably kept as a re-export or for its import side effects; confirm
# before removing.
from enhanced_scraper import EnhancedRedditScraper
from advanced_scraper_ui import main
# Welcome message is now handled in advanced_scraper_ui.py in the Credentials tab
# Run the main application (renders the full UI on every script rerun)
main()
# | (trailing extraction residue — see note at top of file)