From 542ac3d7dc7d78da2c3faa3692b3ecc909e0208c Mon Sep 17 00:00:00 2001 From: "google-labs-jules[bot]" <161369871+google-labs-jules[bot]@users.noreply.github.com> Date: Tue, 17 Jun 2025 20:32:36 +0000 Subject: [PATCH 1/3] Jules was unable to complete the task in time. Please review the work done so far and provide feedback for Jules to continue. --- components/Charts.jsx | 84 ++ components/auth/withAdminAuth.tsx | 55 + components/layout.tsx | 49 +- components/ui/Button.jsx | 4 + components/ui/ProductCard.jsx | 74 + components/ui/SummaryCard.jsx | 19 + config/appConfig.js | 6 + lib/db.js | 627 ++++++++ lib/email.js | 55 + lib/openai.js | 68 + lib/telegram.js | 49 + lib/vector.js | 94 ++ package.json | 13 +- pages/_app.tsx | 7 +- pages/admin/settings.jsx | 180 +++ pages/admin/upload.jsx | 122 ++ pages/admin/users.jsx | 183 +++ pages/api/admin/users/list.js | 38 + pages/api/admin/users/update-role.js | 65 + pages/api/auth/[...nextauth].js | 101 ++ pages/api/chat.ts | 190 ++- pages/api/generate-report-pdf.js | 199 +++ pages/api/public-chat.js | 137 ++ pages/api/reports-data.js | 76 + pages/api/send-weekly-report.js | 209 +++ pages/api/settings.js | 91 ++ pages/api/upload-csv.js | 186 +++ pages/index.tsx | 66 +- pages/reports.jsx | 185 +++ pages/unauthorized.tsx | 16 + pages/widget.jsx | 151 ++ styles/Widget.module.css | 130 ++ yarn.lock | 2045 ++++++++++++++++++++++++-- 33 files changed, 5370 insertions(+), 204 deletions(-) create mode 100644 components/Charts.jsx create mode 100644 components/auth/withAdminAuth.tsx create mode 100644 components/ui/Button.jsx create mode 100644 components/ui/ProductCard.jsx create mode 100644 components/ui/SummaryCard.jsx create mode 100644 config/appConfig.js create mode 100644 lib/db.js create mode 100644 lib/email.js create mode 100644 lib/openai.js create mode 100644 lib/telegram.js create mode 100644 lib/vector.js create mode 100644 pages/admin/settings.jsx create mode 100644 pages/admin/upload.jsx create mode 100644 pages/admin/users.jsx create mode 100644 pages/api/admin/users/list.js create mode 100644 pages/api/admin/users/update-role.js create mode 100644 pages/api/auth/[...nextauth].js create mode 100644 pages/api/generate-report-pdf.js create mode 100644 pages/api/public-chat.js create mode 100644 pages/api/reports-data.js create mode 100644 pages/api/send-weekly-report.js create mode 100644 pages/api/settings.js create mode 100644 pages/api/upload-csv.js create mode 100644 pages/reports.jsx create mode 100644 pages/unauthorized.tsx create mode 100644 pages/widget.jsx create mode 100644 styles/Widget.module.css diff --git a/components/Charts.jsx b/components/Charts.jsx new file mode 100644 index 000000000..bcc9a1b52 --- /dev/null +++ b/components/Charts.jsx @@ -0,0 +1,84 @@ +import React from 'react'; +import { + LineChart, Line, BarChart, Bar, XAxis, YAxis, CartesianGrid, Tooltip, Legend, ResponsiveContainer, Label +} from 'recharts'; + +export const WeeklyLeadsChart = ({ data }) => { + if (!data || data.length === 0) { + return

      <p>No data available for weekly leads chart.</p>;

      <p>No data available for Meta Ads performance chart.</p>
; + } + + // Aggregate data by campaign_name for spend and leads + const aggregatedData = data.reduce((acc, item) => { + const campaign = item.campaign_name || 'Unknown Campaign'; + if (!acc[campaign]) { + acc[campaign] = { name: campaign, spend: 0, leads: 0 }; + } + acc[campaign].spend += item.spend || 0; + acc[campaign].leads += item.leads || 0; + return acc; + }, {}); + + const chartData = Object.values(aggregatedData); + + return ( + + + + + + + + + { + if (name === 'Spend') return [`$${parseFloat(value).toFixed(2)}`, 'Spend']; + return [value, 'Leads']; + }} + /> + + + + + + ); +}; diff --git a/components/auth/withAdminAuth.tsx b/components/auth/withAdminAuth.tsx new file mode 100644 index 000000000..8688d60fa --- /dev/null +++ b/components/auth/withAdminAuth.tsx @@ -0,0 +1,55 @@ +import { useSession } from 'next-auth/react'; +import { useRouter } from 'next/router'; +import React, { ComponentType, useEffect } from 'react'; +import Layout from '../layout'; // Assuming a general layout for error/loading states + +interface WithAdminAuthProps {} + +const withAdminAuth =

<P extends object>(WrappedComponent: ComponentType<P>
) => { + const AdminAuthComponent = (props: P & WithAdminAuthProps) => { + const { data: session, status } = useSession(); + const router = useRouter(); + const loading = status === 'loading'; + + useEffect(() => { + if (!loading && status === 'unauthenticated') { + // Not logged in, redirect to home or a login page + // Alternatively, could use signIn() here: + // signIn('google', { callbackUrl: router.pathname }); + router.push('/'); + } else if (!loading && status === 'authenticated' && session?.user?.role !== 'Admin') { + // Logged in, but not an admin + router.push('/unauthorized'); // Or some other page indicating lack of permission + } + }, [session, status, loading, router]); + + if (loading) { + return

      <Layout><p>Loading session...</p></Layout>
; // Or a dedicated loading component + } + + if (status === 'unauthenticated' || (session && session.user?.role !== 'Admin')) { + // Render null or a message while redirecting, or if redirect fails for some reason + // Or a more specific "Access Denied" component within the Layout + return

      <Layout><p>Access Denied. Redirecting...</p></Layout>
; + } + + // If authenticated and role is Admin, render the wrapped component + return ; + }; + + // Set a display name for easier debugging + AdminAuthComponent.displayName = `WithAdminAuth(${WrappedComponent.displayName || WrappedComponent.name || 'Component'})`; + + return AdminAuthComponent; +}; + +export default withAdminAuth; + +// We should also create the /unauthorized page +// For now, if a non-admin tries to access an admin page, they will be redirected to /unauthorized +// If not logged in, they will be redirected to / +// This HOC will be used to wrap admin pages. +// Example usage: +// import withAdminAuth from '../components/auth/withAdminAuth'; +// const AdminDashboardPage = () =>
<div>Admin Dashboard</div>
; +// export default withAdminAuth(AdminDashboardPage); diff --git a/components/layout.tsx b/components/layout.tsx index 5e3d20700..50ba967e4 100644 --- a/components/layout.tsx +++ b/components/layout.tsx @@ -1,17 +1,56 @@ +import { useSession, signIn, signOut } from 'next-auth/react'; +import Link from 'next/link'; // Import Link for navigation + interface LayoutProps { children?: React.ReactNode; } export default function Layout({ children }: LayoutProps) { + const { data: session, status } = useSession(); + const loading = status === 'loading'; + return (
-
-
diff --git a/components/ui/Button.jsx b/components/ui/Button.jsx new file mode 100644 index 000000000..4271db90a --- /dev/null +++ b/components/ui/Button.jsx @@ -0,0 +1,4 @@ +// Placeholder for a UI Button component +export default function Button({ children, ...props }) { + return ; +} diff --git a/components/ui/ProductCard.jsx b/components/ui/ProductCard.jsx new file mode 100644 index 000000000..67283fdcf --- /dev/null +++ b/components/ui/ProductCard.jsx @@ -0,0 +1,74 @@ +import React from 'react'; +import Image from 'next/image'; // Assuming Next.js Image component for optimization + +// Basic styling - can be expanded with Tailwind or CSS Modules +const cardStyle = { + border: '1px solid #e0e0e0', + borderRadius: '8px', + padding: '16px', + margin: '16px 0', + maxWidth: '350px', // Or adjust as needed + boxShadow: '0 2px 4px rgba(0,0,0,0.1)', +}; + +const imageStyle = { + width: '100%', + height: '200px', // Fixed height, or use aspect ratio + objectFit: 'cover', // Or 'contain' + borderRadius: '4px', + marginBottom: '12px', +}; + +const ProductCard = ({ product }) => { + if (!product) { + return null; + } + + // Destructure with defaults for safety + const { name, description, image_url, claims, nutrition_info, weight } = product; + + return ( +
+    <div style={cardStyle}>
+      {image_url && (
+        <Image src={image_url} alt={name || 'Product image'} width={350} height={200} style={imageStyle} />
+      )}
+
+      <h3>{name || 'Unnamed Product'}</h3>
+
+      {description && <p>{description}</p>}
+
+      {weight && <p>Weight: {weight}</p>}
+
+      {claims && Array.isArray(claims) && claims.length > 0 && (
+        <div>
+          <strong>Claims:</strong>
+          <ul>
+            {claims.map((claim, index) => (
+              <li key={index}>{typeof claim === 'string' ? claim : claim.text}</li>
+            ))}
+          </ul>
+        </div>
+      )}
+
+      {nutrition_info && typeof nutrition_info === 'object' && Object.keys(nutrition_info).length > 0 && (
+        <div>
+          <strong>Nutrition Highlights:</strong>
+          <ul>
+            {/* Example: Displaying a few key nutrition facts. Adjust as needed. */}
+            {nutrition_info.calories && <li>Calories: {nutrition_info.calories}</li>}
+            {nutrition_info.protein && <li>Protein: {nutrition_info.protein}g</li>}
+            {/* Add more nutrition details as desired */}
+          </ul>
+        </div>
+      )}
+      {/* Add more fields as needed, e.g., price, link to product page, etc. */}
+    </div>
+ ); +}; + +export default ProductCard; diff --git a/components/ui/SummaryCard.jsx b/components/ui/SummaryCard.jsx new file mode 100644 index 000000000..aea392242 --- /dev/null +++ b/components/ui/SummaryCard.jsx @@ -0,0 +1,19 @@ +import React from 'react'; + +const SummaryCard = ({ title, value, children, valueClassName }) => { + return ( +
+

+    <div>
+      <h4>{title}</h4>
+      {value !== undefined && value !== null && (
+        <p className={valueClassName}>{value}</p>
+      )}
+      {children && <div>{children}</div>}
+    </div>
+ ); +}; + +export default SummaryCard; diff --git a/config/appConfig.js b/config/appConfig.js new file mode 100644 index 000000000..425704d08 --- /dev/null +++ b/config/appConfig.js @@ -0,0 +1,6 @@ +// Placeholder for application level configurations +export const appConfig = { + appName: "My Next App", + version: "1.0.0", + // Add other configurations here +}; diff --git a/lib/db.js b/lib/db.js new file mode 100644 index 000000000..345884ddd --- /dev/null +++ b/lib/db.js @@ -0,0 +1,627 @@ +import { createClient } from '@supabase/supabase-js'; + +const supabaseUrl = process.env.SUPABASE_URL; +const supabaseAnonKey = process.env.SUPABASE_ANON_KEY; + +if (!supabaseUrl || !supabaseAnonKey) { + throw new Error('Supabase URL and Anon Key must be defined in environment variables.'); +} + +export const supabase = createClient(supabaseUrl, supabaseAnonKey); + +/* +SQL for creating the 'users' table in Supabase: + +CREATE TABLE public.users ( + id uuid NOT NULL DEFAULT uuid_generate_v4(), + email text NOT NULL, + name text NULL, + image text NULL, + role text NOT NULL DEFAULT 'User'::text, + "createdAt" timestamptz NOT NULL DEFAULT now(), + "updatedAt" timestamptz NOT NULL DEFAULT now(), + CONSTRAINT users_pkey PRIMARY KEY (id), + CONSTRAINT users_email_key UNIQUE (email) +); + +-- Optional: Enable Row Level Security (RLS) on the table +ALTER TABLE public.users ENABLE ROW LEVEL SECURITY; + +-- Optional: Create policies for RLS (examples below, adjust as needed) + +-- Allow users to read their own data +CREATE POLICY "Allow individual user read access" +ON public.users +FOR SELECT +USING (auth.uid() = id); -- Assumes 'id' in your users table matches Supabase auth.uid() + -- If using NextAuth's user.id (which is token.sub from provider), + -- you might need a different way to link them or a different policy. + -- For NextAuth, you might manage access more at the application layer + -- or ensure the 'id' field is populated with auth.uid() if using Supabase Auth alongside NextAuth. + +-- Allow users to update their own data +CREATE POLICY "Allow individual user update access" +ON public.users +FOR UPDATE +USING (auth.uid() = id); + +-- For NextAuth-centric approach where Supabase is just a DB: +-- You might have simpler policies or manage access primarily through your API logic +-- if Supabase Auth isn't the primary driver for user identification in policies. + +-- A common policy for users table when using NextAuth and Supabase as just a DB: +-- Allow authenticated users to read all users (or specific fields) +-- CREATE POLICY "Allow authenticated read access" +-- ON public.users FOR SELECT USING (auth.role() = 'authenticated'); + +-- It's crucial to align your RLS policies with how you identify and authenticate users +-- (Supabase Auth vs. NextAuth sessions). +-- If NextAuth is the sole auth system, your backend functions (using service_role key or specific user queries) +-- will interact with Supabase, and RLS might be simpler or focused on service-level access. + +-- Don't forget to create a "createdAt" function if it doesn't exist or set default via now() +-- And an "updatedAt" trigger function: +CREATE OR REPLACE FUNCTION public.handle_updated_at() +RETURNS TRIGGER AS $$ +BEGIN + NEW."updatedAt" = now(); + RETURN NEW; +END; +$$ LANGUAGE plpgsql; + +CREATE TRIGGER on_users_updated_at +BEFORE UPDATE ON public.users +FOR EACH ROW +EXECUTE PROCEDURE public.handle_updated_at(); + +*/ + +/** + * Fetches a user by their email. + * @param {string} email The user's email. 
+ * @returns {Promise<{data: object | null, error: object | null}>} + */ +export const getUserByEmail = async (email) => { + if (!email) return { data: null, error: { message: 'Email is required.' } }; + return supabase.from('users').select('*').eq('email', email).single(); +}; + +/** + * Creates a new user. + * @param {object} userData The user data (e.g., email, name, image, role). + * @returns {Promise<{data: object | null, error: object | null}>} + */ +export const createUser = async (userData) => { + if (!userData || !userData.email) { + return { data: null, error: { message: 'User data with email is required.' } }; + } + const defaultData = { + role: 'User', // Default role + ...userData, + }; + return supabase.from('users').insert(defaultData).select().single(); +}; + +/** + * Updates a user's role. + * @param {string} userId The ID of the user to update. + * @param {string} role The new role. + * @returns {Promise<{data: object | null, error: object | null}>} + */ +export const updateUserRole = async (userId, role) => { + if (!userId || !role) { + return { data: null, error: { message: 'User ID and role are required.' } }; + } + return supabase.from('users').update({ role }).eq('id', userId).select().single(); +}; + +/** + * Upserts a user based on their email. + * Creates them if they don't exist, or updates their name/image if they do. + * This is useful in the NextAuth signIn callback. + * @param {object} userProfile From NextAuth (user.email, user.name, user.image) + * @returns {Promise<{data: object | null, error: object | null}>} + */ +export const upsertUser = async (userProfile) => { + if (!userProfile || !userProfile.email) { + return { data: null, error: { message: 'User profile with email is required.' } }; + } + + const { data: existingUser, error: fetchError } = await getUserByEmail(userProfile.email); + + if (fetchError && fetchError.code !== 'PGRST116') { // PGRST116: row not found + console.error('Error fetching user for upsert:', fetchError); + return { data: null, error: fetchError }; + } + + if (existingUser) { + // User exists, update name and image if they changed + if (existingUser.name !== userProfile.name || existingUser.image !== userProfile.image) { + const { data, error } = await supabase + .from('users') + .update({ name: userProfile.name, image: userProfile.image }) + .eq('email', userProfile.email) + .select() + .single(); + if (error) console.error('Error updating user:', error); + return { data: data || existingUser, error }; // return updated or existing on error + } + return { data: existingUser, error: null }; // No changes needed + } else { + // User doesn't exist, create them + const { data, error } = await createUser({ + email: userProfile.email, + name: userProfile.name, + image: userProfile.image, + // role: 'User' // createUser will set default role + }); + if (error) console.error('Error creating user:', error); + return { data, error }; + } +}; + +/* +SQL for creating the 'products' table: + +CREATE TABLE public.products ( + id uuid NOT NULL DEFAULT uuid_generate_v4(), + name text NOT NULL, + description text NULL, + weight text NULL, -- Or numeric if standard unit + claims jsonb NULL, -- Store as an array of strings or objects + nutrition_info jsonb NULL, -- Store as a structured object + image_url text NULL, + "createdAt" timestamptz NOT NULL DEFAULT now(), + "updatedAt" timestamptz NOT NULL DEFAULT now(), + CONSTRAINT products_pkey PRIMARY KEY (id), + CONSTRAINT products_name_key UNIQUE (name) -- Assuming product names should be unique 
+); + +-- Apply the updatedAt trigger +CREATE TRIGGER on_products_updated_at +BEFORE UPDATE ON public.products +FOR EACH ROW +EXECUTE PROCEDURE public.handle_updated_at(); + +-- Optional: Enable Row Level Security (RLS) if needed, and define policies. +-- ALTER TABLE public.products ENABLE ROW LEVEL SECURITY; +-- CREATE POLICY "Allow public read access to products" ON public.products FOR SELECT USING (true); +-- CREATE POLICY "Allow admin write access to products" ON public.products FOR ALL USING (auth.role() = 'authenticated' AND (SELECT role FROM public.users WHERE id = auth.uid()) = 'Admin'); -- Example if using Supabase Auth roles + +SQL for creating the 'meta_ads' table: + +CREATE TABLE public.meta_ads ( + id uuid NOT NULL DEFAULT uuid_generate_v4(), + campaign_name text NULL, + ad_set_name text NULL, -- Added field + ad_name text NULL, -- Added field + date date NOT NULL, + impressions integer NULL, + clicks integer NULL, + leads integer NULL, -- Or numeric if fractional leads are possible + spend numeric NULL, + -- It's good practice to have a unique constraint for the combination of fields + -- that define a unique ad performance record for a given day. + -- For example, if campaign_name, ad_set_name, ad_name, and date make a record unique: + CONSTRAINT meta_ads_unique_performance_record UNIQUE (campaign_name, ad_set_name, ad_name, date), + "createdAt" timestamptz NOT NULL DEFAULT now(), + "updatedAt" timestamptz NOT NULL DEFAULT now(), + CONSTRAINT meta_ads_pkey PRIMARY KEY (id) +); + +-- Apply the updatedAt trigger +CREATE TRIGGER on_meta_ads_updated_at +BEFORE UPDATE ON public.meta_ads +FOR EACH ROW +EXECUTE PROCEDURE public.handle_updated_at(); + +-- Optional: Enable Row Level Security (RLS) if needed, and define policies. +-- ALTER TABLE public.meta_ads ENABLE ROW LEVEL SECURITY; +-- CREATE POLICY "Allow admin read access to meta_ads" ON public.meta_ads FOR SELECT USING (auth.role() = 'authenticated' AND (SELECT role FROM public.users WHERE id = auth.uid()) = 'Admin'); +-- CREATE POLICY "Allow admin write access to meta_ads" ON public.meta_ads FOR ALL USING (auth.role() = 'authenticated' AND (SELECT role FROM public.users WHERE id = auth.uid()) = 'Admin'); + +*/ + +/** + * Batch upserts product data. + * Uses 'name' as the conflict resolution column. + * @param {Array} productsData Array of product objects. + * @returns {Promise<{data: Array | null, error: object | null}>} + */ +export const upsertProducts = async (productsData) => { + if (!productsData || productsData.length === 0) { + return { data: [], error: null }; // Or an error if productsData is required to be non-empty + } + // Ensure all products have a 'name' for conflict resolution + for (const product of productsData) { + if (!product.name) { + return { data: null, error: { message: 'All products must have a name for upsert.' } }; + } + } + return supabase.from('products').upsert(productsData, { onConflict: 'name' }).select(); +}; + +/** + * Batch upserts Meta Ads data. + * Uses a composite unique constraint for conflict resolution. + * The constraint 'meta_ads_unique_performance_record' should be defined on (campaign_name, ad_set_name, ad_name, date). + * @param {Array} adsData Array of ad data objects. 
+ * @returns {Promise<{data: Array | null, error: object | null}>} + */ +export const upsertMetaAds = async (adsData) => { + if (!adsData || adsData.length === 0) { + return { data: [], error: null }; + } + // Ensure required fields for the unique constraint are present + for (const ad of adsData) { + if (!ad.campaign_name || !ad.ad_set_name || !ad.ad_name || !ad.date) { + return { data: null, error: { message: 'All ads must have campaign_name, ad_set_name, ad_name, and date for upsert.' } }; + } + } + // The onConflict option should match the name of your unique constraint. + // If your constraint is named e.g. 'meta_ads_campaign_name_ad_set_name_ad_name_date_key', use that. + // For this example, I'm using 'meta_ads_unique_performance_record' as defined in the SQL schema comment. + return supabase.from('meta_ads').upsert(adsData, { onConflict: 'campaign_name,ad_set_name,ad_name,date', ignoreDuplicates: false }).select(); +}; + +/* +SQL for creating the 'app_settings' table: + +CREATE TABLE public.app_settings ( + key text NOT NULL, + value jsonb NULL, -- Using jsonb to allow for various data types + "createdAt" timestamptz NOT NULL DEFAULT now(), + "updatedAt" timestamptz NOT NULL DEFAULT now(), + CONSTRAINT app_settings_pkey PRIMARY KEY (key) +); + +-- Apply the updatedAt trigger (assuming handle_updated_at function is already created from users table setup) +CREATE TRIGGER on_app_settings_updated_at +BEFORE UPDATE ON public.app_settings +FOR EACH ROW +EXECUTE PROCEDURE public.handle_updated_at(); + +-- Optional: RLS (Admins can manage settings, authenticated users might read some) +-- ALTER TABLE public.app_settings ENABLE ROW LEVEL SECURITY; +-- CREATE POLICY "Allow admin access to app_settings" ON public.app_settings FOR ALL USING (auth.role() = 'authenticated' AND (SELECT role FROM public.users WHERE id = auth.uid()) = 'Admin'); +-- CREATE POLICY "Allow authenticated users to read app_settings" ON public.app_settings FOR SELECT USING (auth.role() = 'authenticated'); + +*/ + +/** + * Fetches all application settings. + * @returns {Promise<{data: Record | null, error: object | null}>} Object where keys are setting keys and values are setting values. + */ +export const getAllAppSettings = async () => { + const { data, error } = await supabase.from('app_settings').select('key, value'); + if (error) return { data: null, error }; + if (!data) return { data: {}, error: null }; + + // Transform the array of {key, value} objects into a single object + const settings = data.reduce((acc, setting) => { + acc[setting.key] = setting.value; + return acc; + }, {}); + return { data: settings, error: null }; +}; + +/** + * Updates multiple application settings. + * @param {Record} settingsToUpdate Object containing key-value pairs of settings. + * @returns {Promise<{data: Array | null, error: object | null}>} + */ +export const updateAppSettings = async (settingsToUpdate) => { + if (typeof settingsToUpdate !== 'object' || settingsToUpdate === null || Object.keys(settingsToUpdate).length === 0) { + return { data: null, error: { message: 'Invalid or empty settings object provided.' }}; + } + + const upsertData = Object.entries(settingsToUpdate).map(([key, value]) => ({ + key, + value, + })); + + // Upsert based on the 'key' column + return supabase.from('app_settings').upsert(upsertData, { onConflict: 'key' }).select(); +}; + +/** + * Fetches all users from the database. + * @param {object} [options] Options for pagination. + * @param {number} [options.page=1] The page number to fetch. 
+ * @param {number} [options.limit=10] The number of items per page. + * @returns {Promise<{data: Array | null, count: number | null, error: object | null}>} + */ +export const getAllUsers = async ({ page = 1, limit = 10 } = {}) => { + const offset = (page - 1) * limit; + return supabase + .from('users') + .select('*', { count: 'exact' }) // Request total count + .order('createdAt', { ascending: false }) + .range(offset, offset + limit - 1); +}; + +/** + * Fetches a user by their ID. + * @param {string} userId The user's ID. + * @returns {Promise<{data: object | null, error: object | null}>} + */ +export const getUserById = async (userId) => { + if (!userId) return { data: null, error: { message: 'User ID is required.' } }; + return supabase.from('users').select('*').eq('id', userId).single(); +}; + +/* +SQL for creating the 'leads' table: + +CREATE TABLE public.leads ( + id uuid NOT NULL DEFAULT uuid_generate_v4(), + user_id uuid NULL, -- Nullable for public widget, references public.users(id) + question text NOT NULL, + response text NULL, -- The LLM's answer + source text NULL, -- e.g., 'seller-chat', 'public-widget' + "createdAt" timestamptz NOT NULL DEFAULT now(), + -- "updatedAt" timestamptz NOT NULL DEFAULT now(), -- Not typically needed for leads unless they are updated + CONSTRAINT leads_pkey PRIMARY KEY (id), + CONSTRAINT leads_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id) ON DELETE SET NULL -- Optional: what happens if user is deleted +); + +-- Optional: Indexes for querying +CREATE INDEX idx_leads_user_id ON public.leads(user_id); +CREATE INDEX idx_leads_created_at ON public.leads("createdAt"); + +-- RLS for leads (example: users can see their own leads, admins can see all) +-- ALTER TABLE public.leads ENABLE ROW LEVEL SECURITY; +-- CREATE POLICY "Allow users to see their own leads" ON public.leads FOR SELECT USING (auth.uid() = user_id); +-- CREATE POLICY "Allow admins to see all leads" ON public.leads FOR SELECT USING ((SELECT role FROM public.users WHERE id = auth.uid()) = 'Admin'); +-- For inserts, ensure the user_id is set correctly or allow anonymous inserts if user_id is nullable. +-- CREATE POLICY "Allow authenticated users to insert their own leads" ON public.leads FOR INSERT WITH CHECK (auth.uid() = user_id); +-- If allowing public widget inserts where user_id is NULL: +-- CREATE POLICY "Allow public inserts for leads" ON public.leads FOR INSERT WITH CHECK (user_id IS NULL); + + +*/ + +/** + * Logs a lead (question and optionally response) to the database. + * @param {object} leadData The lead data. + * @param {string} [leadData.user_id] The ID of the user asking the question (if authenticated). + * @param {string} leadData.question The question asked by the user. + * @param {string} [leadData.response] The response provided by the LLM. + * @param {string} [leadData.source] The source of the lead (e.g., 'seller-chat'). + * @returns {Promise<{data: object | null, error: object | null}>} + */ +export const logLead = async (leadData) => { + if (!leadData || !leadData.question) { + return { data: null, error: { message: 'Question is required to log a lead.' 
} }; + } + + const dataToInsert = { + user_id: leadData.user_id || null, + question: leadData.question, + response: leadData.response || null, + source: leadData.source || 'unknown', + // createdAt is handled by defaultValue in DB + }; + + return supabase.from('leads').insert(dataToInsert).select().single(); +}; + +// Reporting specific functions + +/** + * Gets the total count of leads, optionally within a date range. + * @param {object} [options] Optional parameters. + * @param {string} [options.startDateISO] ISO string for start date (inclusive). + * @param {string} [options.endDateISO] ISO string for end date (inclusive). + * @returns {Promise<{count: number | null, error: object | null}>} + */ +export const getTotalLeadsCount = async (options = {}) => { + const { startDateISO, endDateISO } = options; + let query = supabase.from('leads').select('*', { count: 'exact', head: true }); + + if (startDateISO && endDateISO) { + query = query.gte('createdAt', startDateISO).lte('createdAt', endDateISO); + } else if (startDateISO) { + query = query.gte('createdAt', startDateISO); + } else if (endDateISO) { + query = query.lte('createdAt', endDateISO); + } + + const { count, error } = await query; + return { count, error }; +}; + +/** + * Gets the top N most frequently asked questions from the leads table, optionally within a date range. + * @param {number} [limit=5] Number of top questions to return. + * @param {object} [options] Optional parameters. + * @param {string} [options.startDateISO] ISO string for start date (inclusive). + * @param {string} [options.endDateISO] ISO string for end date (inclusive). + * @returns {Promise<{data: Array<{question: string, count: number}> | null, error: object | null}>} + */ +export const getTopQuestions = async (limit = 5, options = {}) => { + const { startDateISO, endDateISO } = options; + // RPC function is highly recommended for this. The JS implementation below is inefficient. + // Example RPC call (if function `get_top_questions_in_range` is created): + // const { data, error } = await supabase.rpc('get_top_questions_in_range', { + // limit_count: limit, + // start_date_param: startDateISO, + // end_date_param: endDateISO + // }); + // return {data, error}; + + // Fallback JS implementation (inefficient for large datasets) + let query = supabase.from('leads').select('question'); + if (startDateISO && endDateISO) { + query = query.gte('createdAt', startDateISO).lte('createdAt', endDateISO); + } else if (startDateISO) { + query = query.gte('createdAt', startDateISO); + } else if (endDateISO) { + query = query.lte('createdAt', endDateISO); + } + + const { data, error } = await query; + // Creating a DB function `get_top_questions(limit_count INT)` would be more robust. 
+ // + // CREATE OR REPLACE FUNCTION get_top_questions(limit_count INT) + // RETURNS TABLE(question_text TEXT, occurrences BIGINT) AS $$ + // BEGIN + // RETURN QUERY + // SELECT + // question, + // COUNT(*) AS question_count + // FROM + // public.leads + // GROUP BY + // question + // ORDER BY + // question_count DESC + // LIMIT + // limit_count; + // END; + // $$ LANGUAGE plpgsql; + // + // Then call it: const { data, error } = await supabase.rpc('get_top_questions', { limit_count: limit }); + + // For now, fetching raw data and processing in JS (less efficient for large datasets): + const { data, error } = await supabase.from('leads').select('question'); + if (error) return { data: null, error }; + if (!data) return { data: [], error: null }; + + const questionCounts = data.reduce((acc, lead) => { + const question = lead.question.trim().toLowerCase(); // Normalize + acc[question] = (acc[question] || 0) + 1; + return acc; + }, {}); + + const sortedQuestions = Object.entries(questionCounts) + .map(([question, count]) => ({ question, count })) + .sort((a, b) => b.count - a.count) + .slice(0, limit); + + return { data: sortedQuestions, error: null }; +}; + + +/** + * Gets the number of leads per week for the last N weeks. + * @param {number} [weeks=12] Number of weeks to fetch data for. + * @returns {Promise<{data: Array<{week_start_date: string, count: number}> | null, error: object | null}>} + */ +export const getWeeklyLeadsTrend = async (weeks = 12) => { + // This also typically requires date truncation and grouping, best done with a DB function or view. + // Supabase JS client might require RPC for this too. + // + // CREATE OR REPLACE FUNCTION get_weekly_leads(num_weeks INT) + // RETURNS TABLE(week_start TEXT, lead_count BIGINT) AS $$ + // BEGIN + // RETURN QUERY + // SELECT + // to_char(date_trunc('week', "createdAt"), 'YYYY-MM-DD') AS week_start_date, + // COUNT(*) AS weekly_leads_count + // FROM + // public.leads + // WHERE + // "createdAt" >= date_trunc('week', NOW() - (num_weeks || ' weeks')::interval) + // GROUP BY + // week_start_date + // ORDER BY + // week_start_date ASC; + // END; + // $$ LANGUAGE plpgsql; + // + // Then call: const { data, error } = await supabase.rpc('get_weekly_leads', { num_weeks: weeks }); + + // Simpler JS implementation for now (less efficient, fetches more data then processes): + // Calculate the date N weeks ago + const startDate = new Date(); + startDate.setDate(startDate.getDate() - weeks * 7); + + const { data, error } = await supabase + .from('leads') + .select('createdAt') + .gte('createdAt', startDate.toISOString()); + + if (error) return { data: null, error }; + if (!data) return { data: [], error: null }; + + const weeklyData = data.reduce((acc, lead) => { + const leadDate = new Date(lead.createdAt); + const dayOfWeek = leadDate.getUTCDay(); // Sunday = 0, Monday = 1, etc. + // Calculate the start of the week (assuming week starts on Sunday for simplicity here) + const weekStart = new Date(leadDate); + weekStart.setUTCDate(leadDate.getUTCDate() - dayOfWeek); + const weekStartDateString = weekStart.toISOString().split('T')[0]; + + acc[weekStartDateString] = (acc[weekStartDateString] || 0) + 1; + return acc; + }, {}); + + const trendData = Object.entries(weeklyData) + .map(([week_start_date, count]) => ({ week_start_date, count })) + .sort((a, b) => new Date(a.week_start_date) - new Date(b.week_start_date)); + + return { data: trendData, error: null }; +}; + + +/** + * Fetches a summary of Meta Ads performance, optionally within a date range. 
+ * @param {object} [options] Optional parameters. + * @param {string} [options.startDateISO] ISO string for start date (inclusive, applies to ad 'date' field). + * @param {string} [options.endDateISO] ISO string for end date (inclusive, applies to ad 'date' field). + * @returns {Promise<{data: object | null, error: object | null}>} + */ +export const getMetaAdsSummary = async (options = {}) => { + const { startDateISO, endDateISO } = options; + let query = supabase.from('meta_ads').select('spend, impressions, clicks, leads'); + + if (startDateISO && endDateISO) { + query = query.gte('date', startDateISO).lte('date', endDateISO); + } else if (startDateISO) { + query = query.gte('date', startDateISO); + } else if (endDateISO) { + query = query.lte('date', endDateISO); + } + + const { data, error } = await query; + + if (error) return { data: null, error }; + if (!data || data.length === 0) return { data: { totalSpend: 0, totalImpressions: 0, totalClicks: 0, totalLeads: 0, avgCPL: 0, avgCTR: 0 }, error: null }; + + const summary = data.reduce((acc, ad) => { + acc.totalSpend += ad.spend || 0; + acc.totalImpressions += ad.impressions || 0; + acc.totalClicks += ad.clicks || 0; + acc.totalLeads += ad.leads || 0; + return acc; + }, { totalSpend: 0, totalImpressions: 0, totalClicks: 0, totalLeads: 0 }); + + summary.avgCPL = summary.totalLeads > 0 ? summary.totalSpend / summary.totalLeads : 0; + summary.avgCTR = summary.totalImpressions > 0 ? summary.totalClicks / summary.totalImpressions : 0; + + return { data: summary, error: null }; +}; + + +/** + * Fetches all Meta Ads campaign data, potentially with calculated metrics. + * @returns {Promise<{data: Array | null, error: object | null}>} + */ +export const getAllMetaAdsCampaignData = async () => { + const { data, error } = await supabase + .from('meta_ads') + .select('*') // Select all fields + .order('date', { ascending: false }); // Example ordering + + if (error) return { data: null, error }; + + // Calculate metrics like CTR, CPL per row if needed, or do it on client-side + const processedData = data?.map(ad => ({ + ...ad, + ctr: ad.impressions > 0 ? (ad.clicks / ad.impressions) : 0, + cpl: ad.leads > 0 ? (ad.spend / ad.leads) : 0, + })) || []; + + return { data: processedData, error: null }; +}; diff --git a/lib/email.js b/lib/email.js new file mode 100644 index 000000000..518680cc3 --- /dev/null +++ b/lib/email.js @@ -0,0 +1,55 @@ +import sgMail from '@sendgrid/mail'; + +const SENDGRID_API_KEY = process.env.SENDGRID_API_KEY; +const SENDGRID_FROM_EMAIL = process.env.SENDGRID_FROM_EMAIL; // Verified sender + +if (SENDGRID_API_KEY) { + sgMail.setApiKey(SENDGRID_API_KEY); + console.log("SendGrid mail client configured."); +} else { + console.warn("SENDGRID_API_KEY is not set. Email functionality will be disabled."); +} + +/** + * Sends an email with an attachment. + * @param {object} mailOptions + * @param {string} mailOptions.to Recipient email address. + * @param {string} mailOptions.subject Email subject. + * @param {string} mailOptions.text Plain text body (optional). + * @param {string} mailOptions.html HTML body. + * @param {Array<{content: string, filename: string, type: string, disposition: string}>} mailOptions.attachments Array of attachment objects. + * Content should be base64 encoded string. + * @returns {Promise} + * @throws {Error} If sending fails or SendGrid is not configured. 
+ */ +export const sendEmailWithAttachment = async ({ to, subject, text, html, attachments }) => { + if (!SENDGRID_API_KEY) { + throw new Error("SendGrid API Key not configured. Cannot send email."); + } + if (!SENDGRID_FROM_EMAIL) { + throw new Error("SendGrid From Email not configured. Cannot send email."); + } + if (!to) { + throw new Error("Recipient email ('to') is required."); + } + + const msg = { + to, + from: SENDGRID_FROM_EMAIL, + subject, + text, // Optional: for clients that don't render HTML + html, + attachments, // Example: [{ content: 'base64EncodedString', filename: 'report.pdf', type: 'application/pdf', disposition: 'attachment' }] + }; + + try { + await sgMail.send(msg); + console.log(`Email sent successfully to ${to} with subject "${subject}".`); + } catch (error) { + console.error("Error sending email via SendGrid:", error); + if (error.response) { + console.error("SendGrid error details:", error.response.body); + } + throw new Error(`Failed to send email: ${error.message}`); + } +}; diff --git a/lib/openai.js b/lib/openai.js new file mode 100644 index 000000000..02412b4b7 --- /dev/null +++ b/lib/openai.js @@ -0,0 +1,68 @@ +import { OpenAIEmbeddings } from '@langchain/openai'; + +if (!process.env.OPENAI_API_KEY) { + // This will only throw an error when the file is loaded, + // which might be at build time or when an API route/page using it is first hit. + // Consider a check within the function if you want to allow the app to run + // without OPENAI_API_KEY if embedding generation is optional. + console.warn("OPENAI_API_KEY is not set. Embedding generation will fail."); +} + +const embeddings = new OpenAIEmbeddings({ + openAIApiKey: process.env.OPENAI_API_KEY, + modelName: "text-embedding-ada-002", // Or your preferred model +}); + +/** + * Generates an embedding for a product based on its name, description, and claims. + * @param {object} product The product object. + * @param {string} product.name + * @param {string} [product.description] + * @param {string[] | object[]} [product.claims] + * @returns {Promise} The embedding vector, or null if input is insufficient or error occurs. + */ +export const getProductEmbedding = async (product) => { + if (!product || !product.name) { + console.warn("Product name is required to generate embedding."); + return null; + } + + if (!process.env.OPENAI_API_KEY) { + console.error("OpenAI API key not configured. Cannot generate embeddings."); + // Optionally, you could throw an error here to make it more explicit + // throw new Error("OpenAI API key not configured."); + return null; + } + + // Concatenate relevant text fields to create a single string for embedding + let textToEmbed = `Product Name: ${product.name}`; + if (product.description) { + textToEmbed += `\nDescription: ${product.description}`; + } + if (product.claims && product.claims.length > 0) { + // Assuming claims is an array of strings or objects with a 'text' property + const claimsText = product.claims + .map(claim => (typeof claim === 'string' ? 
claim : claim.text)) + .filter(Boolean) + .join(', '); + if (claimsText) { + textToEmbed += `\nClaims: ${claimsText}`; + } + } + // Add other relevant fields like nutrition_info if they are textual and relevant for similarity + // e.g., if (product.nutrition_info?.ingredients) textToEmbed += `\nIngredients: ${product.nutrition_info.ingredients}`; + + if (textToEmbed.length === `Product Name: ${product.name}`.length && !product.description && (!product.claims || product.claims.length === 0) ) { + // Only product name was provided, which is fine, but just noting if other fields were expected. + // console.log(`Generating embedding for product "${product.name}" using name only.`); + } + + try { + const vector = await embeddings.embedQuery(textToEmbed); + return vector; + } catch (error) { + console.error(`Error generating embedding for product "${product.name}":`, error); + // Potentially throw the error if the caller should handle retry/failure explicitly + return null; + } +}; diff --git a/lib/telegram.js b/lib/telegram.js new file mode 100644 index 000000000..8f5138db8 --- /dev/null +++ b/lib/telegram.js @@ -0,0 +1,49 @@ +import TelegramBot from 'node-telegram-bot-api'; + +const TELEGRAM_BOT_TOKEN = process.env.TELEGRAM_BOT_TOKEN; +let bot; + +if (TELEGRAM_BOT_TOKEN) { + bot = new TelegramBot(TELEGRAM_BOT_TOKEN); // No polling needed if only sending messages + console.log("Telegram bot client initialized."); +} else { + console.warn("TELEGRAM_BOT_TOKEN is not set. Telegram functionality will be disabled."); +} + +/** + * Sends a PDF document via Telegram. + * @param {string|number} chatId The target chat ID. + * @param {Buffer} pdfBuffer The PDF content as a Buffer. + * @param {string} [caption] Optional caption for the document. + * @param {string} [filename='report.pdf'] Optional filename for the document. + * @returns {Promise} + * @throws {Error} If sending fails or Telegram bot is not configured. + */ +export const sendTelegramReport = async (chatId, pdfBuffer, caption = '', filename = 'report.pdf') => { + if (!bot) { + throw new Error("Telegram Bot Token not configured. Cannot send message."); + } + if (!chatId) { + throw new Error("Telegram Chat ID ('chatId') is required."); + } + if (!pdfBuffer || pdfBuffer.length === 0) { + throw new Error("PDF Buffer is empty or invalid."); + } + + try { + // Send the document + // The `file` option needs to be a Buffer or a Stream. + // `filename` is passed in the options object for `sendDocument`. 
+ await bot.sendDocument( + chatId, + pdfBuffer, + { caption: caption }, + { filename: filename, contentType: 'application/pdf' } + ); + console.log(`Telegram document sent successfully to chat ID ${chatId}.`); + } catch (error) { + console.error("Error sending document via Telegram:", error); + // Check for specific error details if available (e.g., error.response.body on API errors) + throw new Error(`Failed to send Telegram document: ${error.message}`); + } +}; diff --git a/lib/vector.js b/lib/vector.js new file mode 100644 index 000000000..636a6c6bc --- /dev/null +++ b/lib/vector.js @@ -0,0 +1,94 @@ +import { Pinecone } from '@pinecone-database/pinecone'; + +// Configuration constants (consider moving to a dedicated config file or using process.env directly) +const PINECONE_INDEX_NAME = process.env.PINECONE_INDEX_NAME; +const PINECONE_API_KEY = process.env.PINECONE_API_KEY; +const PINECONE_ENVIRONMENT = process.env.PINECONE_ENVIRONMENT; + +// Define a specific namespace for products, as per subtask requirements +const PINECONE_NAMESPACE_PRODUCTS = process.env.PINECONE_NAMESPACE_PRODUCTS || 'products-namespace'; + + +let pineconeClient = null; + +/** + * Initializes and returns a Pinecone client instance. + * Throws an error if environment variables are missing or initialization fails. + */ +async function initPineconeClient() { + if (!PINECONE_ENVIRONMENT || !PINECONE_API_KEY || !PINECONE_INDEX_NAME) { + throw new Error( + 'Pinecone environment, API key, or index name vars are missing. Check environment variables.' + ); + } + + try { + const pinecone = new Pinecone({ + apiKey: PINECONE_API_KEY, + environment: PINECONE_ENVIRONMENT, + }); + // Test connection or list indexes to ensure client is working (optional) + // await pinecone.listIndexes(); + return pinecone; + } catch (error) { + console.error('Failed to initialize Pinecone Client:', error); + throw new Error('Failed to initialize Pinecone Client'); + } +} + +/** + * Returns a shared instance of the Pinecone client, initializing it if necessary. + */ +export const getPineconeClient = async () => { + if (!pineconeClient) { + pineconeClient = await initPineconeClient(); + } + return pineconeClient; +}; + +/** + * Upserts a product vector into the Pinecone index. + * @param {string} productId The unique ID of the product. + * @param {number[]} embedding The embedding vector for the product. + * @param {object} metadata Optional metadata for the vector. + * @returns {Promise} The result of the upsert operation. 
+ */ +export const upsertProductVector = async (productId, embedding, metadata = {}) => { + if (!productId || !embedding || embedding.length === 0) { + throw new Error('Product ID and a non-empty embedding vector are required for upsert.'); + } + + const client = await getPineconeClient(); + const index = client.Index(PINECONE_INDEX_NAME); + + const vector = { + id: productId, + values: embedding, + metadata: { + ...metadata, // Include any other relevant metadata + productId: productId, // Ensure productId is part of metadata for easier lookup/filtering + lastUpdated: new Date().toISOString(), + }, + }; + + try { + // Upsert into the defined product namespace + const upsertResponse = await index.namespace(PINECONE_NAMESPACE_PRODUCTS).upsert([vector]); + console.log(`Successfully upserted vector for product ${productId} into namespace ${PINECONE_NAMESPACE_PRODUCTS}.`); + return upsertResponse; + } catch (error) { + console.error(`Error upserting vector for product ${productId} to Pinecone:`, error); + throw new Error(`Failed to upsert vector to Pinecone: ${error.message}`); + } +}; + +// Example of how to get the index and namespace for other operations if needed: +// export const getProductVectorIndex = async () => { +// const client = await getPineconeClient(); +// return client.Index(PINECONE_INDEX_NAME).namespace(PINECONE_NAMESPACE_PRODUCTS); +// }; + +// Note: The original `pinecone-client.ts` immediately awaited `initPinecone()`. +// Here, `getPineconeClient` provides a lazy initialization, which is often better for serverless environments +// as client initialization only happens when needed. +// Ensure PINECONE_NAMESPACE_PRODUCTS is set in your environment variables if you want to override the default 'products-namespace'. diff --git a/package.json b/package.json index 08071dec8..af1b687a6 100644 --- a/package.json +++ b/package.json @@ -15,18 +15,27 @@ "ingest": "tsx -r dotenv/config scripts/ingest-data.ts" }, "dependencies": { + "@langchain/openai": "^0.5.13", "@microsoft/fetch-event-source": "^2.0.1", - "@pinecone-database/pinecone": "0.0.14", + "@pinecone-database/pinecone": "^6.1.1", "@radix-ui/react-accordion": "^1.1.1", + "@sendgrid/mail": "^8.1.5", + "@supabase/supabase-js": "^2.50.0", "clsx": "^1.2.1", + "csv-parse": "^5.6.0", "dotenv": "^16.0.3", - "langchain": "0.0.82", + "formidable": "^3.5.4", + "langchain": "^0.3.28", "lucide-react": "^0.125.0", "next": "13.2.3", + "next-auth": "^4.24.11", + "node-telegram-bot-api": "^0.66.0", + "pdf-lib": "^1.17.1", "pdf-parse": "1.1.1", "react": "18.2.0", "react-dom": "18.2.0", "react-markdown": "^8.0.5", + "recharts": "^2.15.3", "tailwind-merge": "^1.10.0" }, "devDependencies": { diff --git a/pages/_app.tsx b/pages/_app.tsx index 316a0acd6..357bd1961 100644 --- a/pages/_app.tsx +++ b/pages/_app.tsx @@ -1,5 +1,6 @@ import '@/styles/base.css'; import type { AppProps } from 'next/app'; +import { SessionProvider } from 'next-auth/react'; import { Inter } from 'next/font/google'; const inter = Inter({ @@ -7,13 +8,13 @@ const inter = Inter({ subsets: ['latin'], }); -function MyApp({ Component, pageProps }: AppProps) { +function MyApp({ Component, pageProps: { session, ...pageProps } }: AppProps) { return ( - <> +
-    </>
+    </SessionProvider>
); } diff --git a/pages/admin/settings.jsx b/pages/admin/settings.jsx new file mode 100644 index 000000000..2102ed028 --- /dev/null +++ b/pages/admin/settings.jsx @@ -0,0 +1,180 @@ +import React, { useState, useEffect } from 'react'; +import withAdminAuth from '../../components/auth/withAdminAuth'; +import Layout from '../../components/layout'; + +const initialFormState = { + pineconeIndexName: '', // Example non-sensitive setting + defaultItemsPerPage: 10, // Another example + // Add other non-sensitive settings here +}; + +// Define which keys from the fetched settings are considered non-sensitive and editable +const editableSettingsKeys = ['pineconeIndexName', 'defaultItemsPerPage']; + +function AdminSettingsPageContent() { + const [settings, setSettings] = useState(initialFormState); + const [apiKeyStatuses, setApiKeyStatuses] = useState({}); + const [isLoading, setIsLoading] = useState(true); + const [isSaving, setIsSaving] = useState(false); + const [message, setMessage] = useState({ type: '', content: '' }); + + useEffect(() => { + fetchSettings(); + }, []); + + const fetchSettings = async () => { + setIsLoading(true); + try { + const response = await fetch('/api/settings'); + if (!response.ok) { + const errorData = await response.json(); + throw new Error(errorData.message || 'Failed to fetch settings'); + } + const data = await response.json(); + + const editableData = {}; + const statuses = data.apiKeyStatuses || {}; + + for (const key of editableSettingsKeys) { + if (data[key] !== undefined) { + editableData[key] = data[key]; + } + } + + setSettings(prev => ({ ...prev, ...editableData })); + setApiKeyStatuses(statuses); + setMessage({ type: 'success', content: 'Settings loaded.' }); + } catch (error) { + console.error("Error fetching settings:", error); + setMessage({ type: 'error', content: error.message }); + } finally { + setIsLoading(false); + } + }; + + const handleInputChange = (event) => { + const { name, value, type, checked } = event.target; + setSettings(prev => ({ + ...prev, + [name]: type === 'checkbox' ? checked : (type === 'number' ? parseInt(value, 10) : value), + })); + }; + + const handleSubmit = async (event) => { + event.preventDefault(); + setIsSaving(true); + setMessage({ type: '', content: '' }); + + // Prepare only the editable settings for saving + const settingsToSave = {}; + for (const key of editableSettingsKeys) { + if (settings[key] !== undefined) { + settingsToSave[key] = settings[key]; + } + } + + try { + const response = await fetch('/api/settings', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ settingsToUpdate: settingsToSave }), + }); + const result = await response.json(); + if (!response.ok) { + throw new Error(result.message || 'Failed to save settings'); + } + setMessage({ type: 'success', content: 'Settings saved successfully!' }); + // Optionally re-fetch settings or update state based on response + if (result.updated) { + const updatedEditableData = {}; + result.updated.forEach(item => { + if (editableSettingsKeys.includes(item.key)) { + updatedEditableData[item.key] = item.value; + } + }); + setSettings(prev => ({ ...prev, ...updatedEditableData })); + } + } catch (error) { + console.error("Error saving settings:", error); + setMessage({ type: 'error', content: error.message }); + } finally { + setIsSaving(false); + } + }; + + return ( +
+

+      <h1>Admin Settings</h1>
+      {message.content && (
+        <div>{message.content}</div>
+      )}
+
+      {isLoading ? (
+        <p>Loading settings...</p>
+      ) : (
+        <>
+          <section>
+            <h2>API Key Statuses</h2>
+            <ul>
+              {Object.entries(apiKeyStatuses).map(([key, status]) => (
+                <li key={key}>{key.replace(/_/g, ' ')}: {status}</li>
+              ))}
+            </ul>
+            <p>Sensitive API keys are managed via environment variables on the server (e.g., in Vercel).</p>
+          </section>
+
+          <form onSubmit={handleSubmit}>
+            <h2>Application Settings</h2>
+            <p>These settings are stored in the database.</p>
+
+            <div>
+              <label htmlFor="pineconeIndexName">Pinecone Index Name: </label>
+              <input
+                type="text"
+                id="pineconeIndexName"
+                name="pineconeIndexName"
+                value={settings.pineconeIndexName}
+                onChange={handleInputChange}
+              />
+              <small>Note: Actual Pinecone index name usage is often direct from env var in `lib/vector.js` for critical operations.</small>
+            </div>
+
+            <div>
+              <label htmlFor="defaultItemsPerPage">Default Items Per Page: </label>
+              <input
+                type="number"
+                id="defaultItemsPerPage"
+                name="defaultItemsPerPage"
+                value={settings.defaultItemsPerPage}
+                onChange={handleInputChange}
+              />
+            </div>
+            {/* Add more editable settings here, matching `editableSettingsKeys` */}
+
+            <button type="submit" disabled={isSaving}>
+              {isSaving ? 'Saving...' : 'Save Settings'}
+            </button>
+          </form>
+ + )} +
+ ); +} + +const ProtectedAdminSettingsPage = () => { + return ( + + + + ); +}; + +export default withAdminAuth(ProtectedAdminSettingsPage); diff --git a/pages/admin/upload.jsx b/pages/admin/upload.jsx new file mode 100644 index 000000000..5950871ef --- /dev/null +++ b/pages/admin/upload.jsx @@ -0,0 +1,122 @@ +import React, { useState } from 'react'; +import withAdminAuth from '../../components/auth/withAdminAuth'; +import Layout from '../../components/layout'; + +function UploadPageContent() { + const [productFile, setProductFile] = useState(null); + const [adsFile, setAdsFile] = useState(null); + const [isUploadingProducts, setIsUploadingProducts] = useState(false); + const [isUploadingAds, setIsUploadingAds] = useState(false); + const [uploadStatus, setUploadStatus] = useState({ products: '', ads: '' }); + + const handleFileChange = (setter) => (event) => { + setter(event.target.files[0]); + }; + + const handleSubmit = async (file, type, setLoading, setStatusKey) => { + if (!file) { + setUploadStatus(prev => ({ ...prev, [setStatusKey]: 'Please select a file first.' })); + return; + } + + setLoading(true); + setUploadStatus(prev => ({ ...prev, [setStatusKey]: 'Uploading...' })); + + const formData = new FormData(); + formData.append('file', file); + formData.append('type', type); // To distinguish on the backend + + try { + const response = await fetch('/api/upload-csv', { + method: 'POST', + body: formData, + // Headers are not explicitly set for 'multipart/form-data' with FormData, + // the browser will set it correctly including the boundary. + }); + + const result = await response.json(); + + if (response.ok) { + setUploadStatus(prev => ({ ...prev, [setStatusKey]: `Success: ${result.message} (${result.processedCount} records)` })); + } else { + setUploadStatus(prev => ({ ...prev, [setStatusKey]: `Error: ${result.message}` })); + } + } catch (error) { + console.error(`Error uploading ${type} CSV:`, error); + setUploadStatus(prev => ({ ...prev, [setStatusKey]: `Error: ${error.message || 'Upload failed'}` })); + } finally { + setLoading(false); + } + }; + + return ( +
+

+    <div>
+      <h1>Admin Upload Page</h1>
+      <p>This page is protected and only accessible by users with the 'Admin' role.</p>
+
+      <section>
+        <h2>Upload Products CSV</h2>
+        <form
+          onSubmit={(e) => {
+            e.preventDefault();
+            handleSubmit(productFile, 'products', setIsUploadingProducts, 'products');
+          }}
+          encType="multipart/form-data"
+        >
+          <input type="file" accept=".csv" onChange={handleFileChange(setProductFile)} />
+          <button type="submit" disabled={isUploadingProducts}>
+            {isUploadingProducts ? 'Uploading...' : 'Upload Products'}
+          </button>
+          {uploadStatus.products && <p>{uploadStatus.products}</p>}
+        </form>
+      </section>
+
+      <section>
+        <h2>Upload Meta Ads CSV</h2>
+        <form
+          onSubmit={(e) => {
+            e.preventDefault();
+            handleSubmit(adsFile, 'metaAds', setIsUploadingAds, 'ads');
+          }}
+          encType="multipart/form-data"
+        >
+          <input type="file" accept=".csv" onChange={handleFileChange(setAdsFile)} />
+          <button type="submit" disabled={isUploadingAds}>
+            {isUploadingAds ? 'Uploading...' : 'Upload Meta Ads'}
+          </button>
+          {uploadStatus.ads && <p>{uploadStatus.ads}</p>}
+        </form>
+      </section>
+    </div>
+ ); +} + +const ProtectedUploadPage = () => { + return ( + + + + ); +}; + +export default withAdminAuth(ProtectedUploadPage); diff --git a/pages/admin/users.jsx b/pages/admin/users.jsx new file mode 100644 index 000000000..3b8c5f6c8 --- /dev/null +++ b/pages/admin/users.jsx @@ -0,0 +1,183 @@ +import React, { useState, useEffect, useCallback } from 'react'; +import withAdminAuth from '../../components/auth/withAdminAuth'; +import Layout from '../../components/layout'; + +const ITEMS_PER_PAGE = 10; // Or fetch from settings +const ALLOWED_ROLES = ['User', 'Admin']; // Roles that can be assigned + +function AdminUsersPageContent() { + const [users, setUsers] = useState([]); + const [isLoading, setIsLoading] = useState(true); + const [error, setError] = useState(''); + const [message, setMessage] = useState(''); + + const [currentPage, setCurrentPage] = useState(1); + const [totalPages, setTotalPages] = useState(1); + const [totalUsers, setTotalUsers] = useState(0); + + // For optimistic updates or tracking individual user changes + const [updatingRoles, setUpdatingRoles] = useState({}); // { userId: boolean } + + const fetchUsers = useCallback(async (page = 1) => { + setIsLoading(true); + setError(''); + try { + const response = await fetch(`/api/admin/users/list?page=${page}&limit=${ITEMS_PER_PAGE}`); + if (!response.ok) { + const errData = await response.json(); + throw new Error(errData.message || 'Failed to fetch users'); + } + const data = await response.json(); + setUsers(data.users || []); + setTotalUsers(data.totalUsers || 0); + setCurrentPage(data.currentPage || 1); + setTotalPages(data.totalPages || 1); + } catch (err) { + console.error("Error fetching users:", err); + setError(err.message); + } finally { + setIsLoading(false); + } + }, []); + + useEffect(() => { + fetchUsers(currentPage); + }, [fetchUsers, currentPage]); + + const handleRoleChange = async (userId, newRole) => { + if (!userId || !newRole) { + setError('User ID or role is missing for update.'); + return; + } + if (!confirm(`Are you sure you want to change this user's role to ${newRole}?`)) { + return; + } + + setUpdatingRoles(prev => ({ ...prev, [userId]: true })); + setMessage(''); + setError(''); + + try { + const response = await fetch('/api/admin/users/update-role', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ userId, newRole }), + }); + const result = await response.json(); + if (!response.ok) { + throw new Error(result.message || 'Failed to update role'); + } + setMessage(`User ${userId} role updated to ${newRole}.`); + // Optimistically update UI or re-fetch + // For simplicity, find user and update their role in local state + setUsers(prevUsers => + prevUsers.map(user => + user.id === userId ? { ...user, role: newRole } : user + ) + ); + } catch (err) { + console.error("Error updating role:", err); + setError(err.message); + } finally { + setUpdatingRoles(prev => ({ ...prev, [userId]: false })); + } + }; + + const handlePageChange = (newPage) => { + if (newPage >= 1 && newPage <= totalPages) { + setCurrentPage(newPage); + } + }; + + return ( +
+    <div>
+      <h1>User Management</h1>
+      {error && <p>Error: {error}</p>}
+      {message && <p>{message}</p>}
+
+      {isLoading ? (
+        <p>Loading users...</p>
+      ) : users.length === 0 && !error ? (
+        <p>No users found.</p>
+      ) : (
+        <>
+          <p>Total Users: {totalUsers}</p>
+          <table>
+            <thead>
+              <tr>
+                <th style={tableHeaderStyle}>ID</th>
+                <th style={tableHeaderStyle}>Email</th>
+                <th style={tableHeaderStyle}>Name</th>
+                <th style={tableHeaderStyle}>Current Role</th>
+                <th style={tableHeaderStyle}>Change Role</th>
+                <th style={tableHeaderStyle}>Registered</th>
+              </tr>
+            </thead>
+            <tbody>
+              {users.map(user => (
+                <tr key={user.id}>
+                  <td style={tableCellStyle}>{user.id}</td>
+                  <td style={tableCellStyle}>{user.email}</td>
+                  <td style={tableCellStyle}>{user.name || 'N/A'}</td>
+                  <td style={tableCellStyle}>{user.role}</td>
+                  <td style={tableCellStyle}>
+                    <select
+                      value={user.role}
+                      onChange={(e) => handleRoleChange(user.id, e.target.value)}
+                      disabled={updatingRoles[user.id]}
+                    >
+                      {ALLOWED_ROLES.map(role => (
+                        <option key={role} value={role}>{role}</option>
+                      ))}
+                    </select>
+                    {updatingRoles[user.id] && <span>Updating...</span>}
+                  </td>
+                  <td style={tableCellStyle}>{new Date(user.createdAt).toLocaleDateString()}</td>
+                </tr>
+              ))}
+            </tbody>
+          </table>
+
+          {/* Pagination Controls */}
+          <div>
+            <button onClick={() => handlePageChange(currentPage - 1)} disabled={currentPage <= 1}>Previous</button>
+            <span> Page {currentPage} of {totalPages} </span>
+            <button onClick={() => handlePageChange(currentPage + 1)} disabled={currentPage >= totalPages}>Next</button>
+          </div>
+        </>
+      )}
+    </div>
+ ); +} + +const tableHeaderStyle = { + borderBottom: '2px solid #ddd', + padding: '8px', + textAlign: 'left', + backgroundColor: '#f7f7f7', +}; + +const tableCellStyle = { + borderBottom: '1px solid #eee', + padding: '8px', +}; + + +const ProtectedAdminUsersPage = () => { + return ( + + + + ); +}; + +export default withAdminAuth(ProtectedAdminUsersPage); diff --git a/pages/api/admin/users/list.js b/pages/api/admin/users/list.js new file mode 100644 index 000000000..f3338dfc6 --- /dev/null +++ b/pages/api/admin/users/list.js @@ -0,0 +1,38 @@ +import { getServerSession } from 'next-auth/next'; +import { authOptions } from '../../auth/[...nextauth]'; // Adjusted path +import { getAllUsers } from '../../../../lib/db'; // Adjusted path + +export default async function handler(req, res) { + if (req.method !== 'GET') { + res.setHeader('Allow', 'GET'); + return res.status(405).json({ message: 'Method Not Allowed' }); + } + + const session = await getServerSession(req, res, authOptions); + + if (!session || session.user?.role !== 'Admin') { + return res.status(403).json({ message: 'Forbidden: Access denied.' }); + } + + try { + const page = parseInt(req.query.page) || 1; + const limit = parseInt(req.query.limit) || 10; + + const { data: users, count, error } = await getAllUsers({ page, limit }); + + if (error) { + console.error('Error fetching users:', error); + return res.status(500).json({ message: 'Failed to fetch users.', error: error.message }); + } + + return res.status(200).json({ + users, + totalUsers: count, + currentPage: page, + totalPages: Math.ceil(count / limit), + }); + } catch (error) { + console.error('API error fetching users:', error); + return res.status(500).json({ message: 'An unexpected error occurred.', error: error.message }); + } +} diff --git a/pages/api/admin/users/update-role.js b/pages/api/admin/users/update-role.js new file mode 100644 index 000000000..2fc05865f --- /dev/null +++ b/pages/api/admin/users/update-role.js @@ -0,0 +1,65 @@ +import { getServerSession } from 'next-auth/next'; +import { authOptions } from '../../auth/[...nextauth]'; // Adjusted path +import { updateUserRole, getUserById } from '../../../../lib/db'; // Adjusted path, added getUserById + +export default async function handler(req, res) { + if (req.method !== 'POST') { + res.setHeader('Allow', 'POST'); + return res.status(405).json({ message: 'Method Not Allowed' }); + } + + const session = await getServerSession(req, res, authOptions); + + if (!session || session.user?.role !== 'Admin') { + return res.status(403).json({ message: 'Forbidden: Access denied.' }); + } + + const { userId, newRole } = req.body; + + if (!userId || !newRole) { + return res.status(400).json({ message: 'User ID and new role are required.' }); + } + + // Optional: Prevent admin from changing their own role through this specific UI action + // if (session.user.id === userId) { // Note: session.user.id is the DB user ID from our callback + // return res.status(400).json({ message: "Admins cannot change their own role through this interface." }); + // } + + // Optional: Prevent changing role of the super admin or specific protected users + // const targetUser = await getUserById(userId); // You might need to implement getUserById + // if (targetUser && targetUser.data && targetUser.data.email === process.env.SUPER_ADMIN_EMAIL) { + // return res.status(403).json({ message: "This user's role cannot be changed." 
}); + // } + + + // Validate newRole (optional, but good practice) + const allowedRoles = ['User', 'Admin']; // Add other roles if they exist + if (!allowedRoles.includes(newRole)) { + return res.status(400).json({ message: `Invalid role: ${newRole}. Allowed roles are: ${allowedRoles.join(', ')}.`}); + } + + try { + const { data: updatedUser, error } = await updateUserRole(userId, newRole); + + if (error) { + console.error(`Error updating role for user ${userId}:`, error); + return res.status(500).json({ message: 'Failed to update user role.', error: error.message }); + } + + if (!updatedUser) { + return res.status(404).json({ message: 'User not found or not updated.'}) + } + + return res.status(200).json({ message: 'User role updated successfully.', user: updatedUser }); + } catch (error) { + console.error('API error updating user role:', error); + return res.status(500).json({ message: 'An unexpected error occurred.', error: error.message }); + } +} + +// Helper function getUserById (if not already in lib/db.js) +// You would typically add this to lib/db.js +// async function getUserById(userId) { +// if (!userId) return { data: null, error: { message: 'User ID is required.' } }; +// return supabase.from('users').select('*').eq('id', userId).single(); +// } diff --git a/pages/api/auth/[...nextauth].js b/pages/api/auth/[...nextauth].js new file mode 100644 index 000000000..98885d291 --- /dev/null +++ b/pages/api/auth/[...nextauth].js @@ -0,0 +1,101 @@ +import NextAuth from 'next-auth'; +import GoogleProvider from 'next-auth/providers/google'; +import { upsertUser, getUserByEmail } from '../../../lib/db'; + +export default NextAuth({ + providers: [ + GoogleProvider({ + clientId: process.env.GOOGLE_CLIENT_ID, + clientSecret: process.env.GOOGLE_CLIENT_SECRET, + }), + // Add other providers here if needed + ], + session: { + strategy: 'jwt', // Using JWT for session management + }, + callbacks: { + async signIn({ user, account, profile }) { + // console.log("signIn callback", { user, account, profile }); + if (!user || !user.email) { + console.error('SignIn callback: User or user email is missing.'); + return false; // Prevent sign-in if email is not present + } + + // Upsert user in the database + const { data: dbUser, error } = await upsertUser({ + email: user.email, + name: user.name, + image: user.image, + }); + + if (error) { + console.error('Error upserting user in signIn callback:', error); + return false; // Prevent sign-in on database error + } + + // You can add logic here to check if user is allowed to sign in based on role or other criteria + // For example, if dbUser.role === 'Disabled' return false + + // Store the database user ID and role onto the user object to pass to JWT callback + // Note: NextAuth `user` object in signIn can be augmented and these properties will pass to JWT `user` param. + if (dbUser) { + user.id = dbUser.id; // Add database ID + user.role = dbUser.role; // Add database role + } + + return true; // Continue sign-in + }, + async jwt({ token, user, account, profile }) { + // console.log("jwt callback", { token, user, account, profile }); + // Persist the OAuth access_token to the token right after signin + if (account) { + token.accessToken = account.access_token; + } + + // If user object exists (it does upon signIn), transfer user.id and user.role to the token. + // These were added in the signIn callback from our database user. 
+ if (user) { + token.id = user.id; // This is our database user ID + token.role = user.role; + } else if (token.email) { + // This case handles when the JWT is re-evaluated (e.g. tab focus, session revalidation) + // and the `user` object is not passed. We need to re-fetch the user role from DB. + // However, `token.sub` usually holds the provider's user ID. + // We need to fetch user by email, as that's our primary unique identifier in the users table. + const { data: dbUser, error } = await getUserByEmail(token.email); + if (dbUser) { + token.id = dbUser.id; + token.role = dbUser.role; + } else if (error) { + console.error('JWT callback: Error fetching user by email:', error); + // Potentially revoke token or set a default/error role + token.role = 'ErrorFetchingRole'; + } + } + return token; + }, + async session({ session, token }) { + // console.log("session callback", { session, token }); + // Send properties to the client, like an access_token, user ID, and user role. + session.accessToken = token.accessToken; // From provider + session.user.id = token.id; // From our database, added in JWT callback + session.user.role = token.role; // From our database, added in JWT callback + // Ensure session.user.email is correctly populated (NextAuth usually does this) + if (token.email && !session.user.email) { + session.user.email = token.email; + } + return session; + }, + }, + pages: { + // signIn: '/auth/signin', // Optionally, define custom sign-in pages + // signOut: '/auth/signout', + // error: '/auth/error', // Error code passed in query string as ?error= + // verifyRequest: '/auth/verify-request', // (used for email/passwordless sign in) + // newUser: '/auth/new-user' // New users will be directed here on first sign in (leave the property out to disable) + }, + // Add database adapter here if you want to use a database for session management + // adapter: SupabaseAdapter({ client: supabase, table: 'nextauth_sessions' }), // Example + secret: process.env.NEXTAUTH_SECRET, + // debug: process.env.NODE_ENV === 'development', // Enable debug messages in development +}); diff --git a/pages/api/chat.ts b/pages/api/chat.ts index b9f41f54d..a8ec75af6 100644 --- a/pages/api/chat.ts +++ b/pages/api/chat.ts @@ -1,19 +1,42 @@ import type { NextApiRequest, NextApiResponse } from 'next'; -import { OpenAIEmbeddings } from 'langchain/embeddings/openai'; -import { PineconeStore } from 'langchain/vectorstores/pinecone'; -import { makeChain } from '@/utils/makechain'; -import { pinecone } from '@/utils/pinecone-client'; -import { PINECONE_INDEX_NAME, PINECONE_NAME_SPACE } from '@/config/pinecone'; +import { getServerSession } from 'next-auth/next'; +import { authOptions } from './auth/[...nextauth]'; // Ensure this path is correct +import { OpenAIEmbeddings } from '@langchain/openai'; +import { OpenAI } from '@langchain/openai'; // LLM for chat completions +import { getPineconeClient } from '../../lib/vector'; // Using our Pinecone client wrapper +import { supabase } from '../../lib/db'; // Direct Supabase client +import { logLead } from '../../lib/db'; +import { PINECONE_INDEX_NAME, PINECONE_NAME_SPACE as DEFAULT_PINECONE_NAMESPACE } from '../../config/pinecone'; // Import default namespace + +// Define a specific namespace for products, as per subtask requirements +const PINECONE_NAMESPACE_PRODUCTS = process.env.PINECONE_NAMESPACE_PRODUCTS || DEFAULT_PINECONE_NAMESPACE || 'products-namespace'; + + +// Initialize OpenAI clients +const embeddings = new OpenAIEmbeddings({ + openAIApiKey: 
process.env.OPENAI_API_KEY, + modelName: "text-embedding-ada-002", +}); + +const llm = new OpenAI({ + openAIApiKey: process.env.OPENAI_API_KEY, + modelName: 'gpt-3.5-turbo', // Or 'gpt-4' or your preferred model + temperature: 0.7, // Adjust as needed +}); export default async function handler( req: NextApiRequest, res: NextApiResponse, ) { - const { question, history } = req.body; + const { question, history } = req.body; // History might be useful for follow-up questions - console.log('question', question); + // Authenticate user + const session = await getServerSession(req, res, authOptions); + if (!session || !session.user || !session.user.id) { + return res.status(401).json({ error: 'Unauthorized: User not logged in.' }); + } + const userId = session.user.id; // This is the database ID from our NextAuth callbacks - //only accept post requests if (req.method !== 'POST') { res.status(405).json({ error: 'Method not allowed' }); return; @@ -22,34 +45,141 @@ export default async function handler( if (!question) { return res.status(400).json({ message: 'No question in the request' }); } - // OpenAI recommends replacing newlines with spaces for best results + const sanitizedQuestion = question.trim().replaceAll('\n', ' '); try { - const index = pinecone.Index(PINECONE_INDEX_NAME); - - /* create vectorstore*/ - const vectorStore = await PineconeStore.fromExistingIndex( - new OpenAIEmbeddings({}), - { - pineconeIndex: index, - textKey: 'text', - namespace: PINECONE_NAME_SPACE, //namespace comes from your config folder - }, - ); - - //create chain - const chain = makeChain(vectorStore); - //Ask a question using chat history - const response = await chain.call({ - question: sanitizedQuestion, - chat_history: history || [], + // 1. Generate embedding for the question + const questionEmbedding = await embeddings.embedQuery(sanitizedQuestion); + if (!questionEmbedding) { + throw new Error('Failed to generate question embedding.'); + } + + // 2. Query Pinecone for relevant product IDs + const pineconeClient = await getPineconeClient(); + const index = pineconeClient.Index(PINECONE_INDEX_NAME); + + const queryResponse = await index.namespace(PINECONE_NAMESPACE_PRODUCTS).query({ + topK: 5, // Number of top results to fetch + vector: questionEmbedding, + includeMetadata: true, // Assuming product IDs are in metadata + // includeValues: false, // Not needed for this step + }); + + const productIds = queryResponse.matches + ?.map((match) => match.metadata?.productId as string) // Ensure 'productId' matches metadata key + .filter((id): id is string => !!id); + + let productContext = ''; + let retrievedProducts: any[] = []; + + if (productIds && productIds.length > 0) { + // 3. Fetch full product details from Supabase + const { data: products, error: dbError } = await supabase + .from('products') + .select('*') // Select all or specific fields needed for the prompt + .in('id', productIds); + + if (dbError) { + console.error('Supabase error fetching products:', dbError); + // Proceed without product context or throw error? For now, proceed. 
+ } else if (products && products.length > 0) { + retrievedProducts = products; // For potential structured response + productContext = "\n\nHere is some information about potentially relevant products:\n"; + products.forEach((product, idx) => { + productContext += `\nProduct ${idx + 1} (ID: ${product.id}):\n`; + productContext += ` Name: ${product.name}\n`; + if (product.description) productContext += ` Description: ${product.description}\n`; + if (product.claims && Array.isArray(product.claims)) { // Assuming claims is an array of strings or objects + const claimsText = product.claims.map(c => typeof c === 'string' ? c : (c as any).text).join(', '); + if(claimsText) productContext += ` Claims: ${claimsText}\n`; + } else if (product.claims && typeof product.claims === 'object'){ // If claims is a single object + productContext += ` Claims: ${(product.claims as any).text || JSON.stringify(product.claims)}\n`; + } + // Add other relevant fields like nutrition_info, weight, etc. + // if (product.nutrition_info) productContext += ` Nutrition: ${JSON.stringify(product.nutrition_info)}\n`; + }); + } + } + if (!productContext) { + productContext = "\n\nNo specific product information was immediately found for this query. Please answer based on general knowledge if applicable, or state that product details are not available."; + } + + + // 4. Construct detailed prompt for LLM + // Basic history integration (needs refinement for proper conversational flow) + const historyText = (history || []) + .map(([q, a]: [string, string]) => `User: ${q}\nAssistant: ${a}`) + .join('\n\n'); + + const prompt = `You are a helpful product assistant for our company. Your goal is to provide accurate and helpful information about our products to our sales team and executives. +Use the product information provided below to answer the user's question. If the information is not sufficient or not available in the provided context, say so. +Do not make up information. If you use product information, try to subtly weave it into a natural answer. You can list product details if the user asks for specifics or comparisons. +When referring to a product, mention its name. + +Previous conversation: +${historyText} + +Product Information Context: +${productContext} + +User's Question: +${sanitizedQuestion} + +Based on the above, please provide a comprehensive answer:`; + + // 5. Call OpenAI Chat Completion API + const llmResponse = await llm.invoke(prompt); + const answer = typeof llmResponse === 'string' ? llmResponse : JSON.stringify(llmResponse); + + + // 6. Log the lead + try { + await logLead({ + user_id: userId, + question: sanitizedQuestion, + response: answer, + source: 'seller-chat', + }); + } catch (logError) { + console.error('Failed to log lead:', logError); + // Do not fail the entire request if logging fails + } + + // 7. Return response + // For now, returning simple text response. + // Future: structure this to include product cards or structured data. + // For example, the LLM could be prompted to output JSON for product details + // or markers that the frontend can use. 
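+      // Illustrative sketch only (not part of the current implementation): if the prompt asked the
+      // LLM to emit JSON, the handler could try to parse it and fall back to plain text, e.g.:
+      // let structured: { answer: string; productIds?: string[] } | null = null;
+      // try { structured = JSON.parse(answer); } catch { /* plain-text answer; use as-is */ }
+      // const responseText = structured?.answer ?? answer;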
+ res.status(200).json({ + text: answer, + sourceDocuments: retrievedProducts.map(p => ({ + pageContent: `Product Name: ${p.name}\nDescription: ${p.description}\nClaims: ${JSON.stringify(p.claims)}\nNutrition: ${JSON.stringify(p.nutrition_info)}`, // Example combined content + metadata: { + source: `ProductDB_ID_${p.id}`, + id: p.id, + name: p.name, + image_url: p.image_url, + // Add any other metadata frontend might need, like full product data for the card + productData: p + } + })), + productData: retrievedProducts, // Send back the raw product data for frontend rendering }); - console.log('response', response); - res.status(200).json(response); } catch (error: any) { - console.log('error', error); + console.error('Error in /api/chat handler:', error); + // Log the question attempt even if there's an error + try { + await logLead({ + user_id: userId, + question: sanitizedQuestion, + response: `Error: ${error.message || 'Something went wrong'}`, + source: 'seller-chat-error', + }); + } catch (logError) { + console.error('Failed to log error lead:', logError); + } res.status(500).json({ error: error.message || 'Something went wrong' }); } } diff --git a/pages/api/generate-report-pdf.js b/pages/api/generate-report-pdf.js new file mode 100644 index 000000000..4e04e5f3c --- /dev/null +++ b/pages/api/generate-report-pdf.js @@ -0,0 +1,199 @@ +import { getServerSession } from 'next-auth/next'; +import { authOptions } from './auth/[...nextauth]'; // Adjust path as needed +import { PDFDocument, rgb, StandardFonts, PageSizes } from 'pdf-lib'; +import { + getTotalLeadsCount, + getTopQuestions, + getWeeklyLeadsTrend, + getMetaAdsSummary, + getAllMetaAdsCampaignData, +} from '../../lib/db'; // Adjust path as needed + +// Helper function for drawing text (can be expanded) +async function drawText(page, text, x, y, font, size = 10, color = rgb(0, 0, 0)) { + page.drawText(text, { x, y, font, size, color }); +} + +// Helper to draw a table (very basic implementation) +async function drawTable(page, data, startX, startY, columnWidths, rowHeight, font, fontSize) { + let currentY = startY; + const headerFontColor = rgb(1, 1, 1); // White + const headerBgColor = rgb(0.2, 0.2, 0.2); // Dark Gray + const rowEvenColor = rgb(0.95, 0.95, 0.95); // Light Gray for even rows + const rowOddColor = rgb(1, 1, 1); // White for odd rows + const borderColor = rgb(0.7, 0.7, 0.7); // Light gray border + + if (!data || data.length === 0) { + drawText(page, "No data available for this table.", startX, currentY - rowHeight, font, fontSize); + return currentY - rowHeight * 2; + } + + const headers = Object.keys(data[0]); + + // Draw header + let currentX = startX; + page.drawRectangle({ + x: startX - 2, // A little padding + y: currentY - rowHeight - 2, + width: columnWidths.reduce((a, b) => a + b, 0) + 4, + height: rowHeight + 4, + color: headerBgColor, + }); + for (let i = 0; i < headers.length; i++) { + drawText(page, headers[i], currentX + 5, currentY - rowHeight + 5, font, fontSize, headerFontColor); + page.drawLine({ start: {x: currentX, y: currentY}, end: {x: currentX, y: currentY - rowHeight * (data.length +1)}, color: borderColor, thickness: 0.5 }); + currentX += columnWidths[i]; + } + page.drawLine({ start: {x: currentX, y: currentY}, end: {x: currentX, y: currentY - rowHeight * (data.length +1)}, color: borderColor, thickness: 0.5}); // Last vertical line for header + page.drawLine({ start: {x: startX, y: currentY}, end: {x: currentX, y: currentY}, color: borderColor, thickness: 0.5}); // Top border line of header + 
currentY -= rowHeight; + + // Draw rows + for (let rowIndex = 0; rowIndex < data.length; rowIndex++) { + const row = data[rowIndex]; + currentX = startX; + const rowBgColor = rowIndex % 2 === 0 ? rowEvenColor : rowOddColor; + page.drawRectangle({ + x: startX -2, + y: currentY - rowHeight -2, + width: columnWidths.reduce((a, b) => a + b, 0) + 4, + height: rowHeight +4, + color: rowBgColor, + // strokeColor: borderColor, // Optional: cell borders + // borderWidth: 0.5, + }); + + for (let colIndex = 0; colIndex < headers.length; colIndex++) { + const cellValue = String(row[headers[colIndex]] === null || row[headers[colIndex]] === undefined ? '' : row[headers[colIndex]]); + drawText(page, cellValue.substring(0, 25), currentX + 5, currentY - rowHeight + 5, font, fontSize -1); // Truncate long text + currentX += columnWidths[colIndex]; + } + page.drawLine({ start: {x: startX, y: currentY}, end: {x: currentX, y: currentY}, color: borderColor, thickness: 0.5}); // Bottom border line of row + currentY -= rowHeight; + } + page.drawLine({ start: {x: startX, y: currentY}, end: {x: currentX, y: currentY}, color: borderColor, thickness: 0.5}); // Final bottom line + + return currentY; +} + + +export default async function handler(req, res) { + if (req.method !== 'POST' && req.method !== 'GET') { // Allow GET for easy testing, POST for actual use + res.setHeader('Allow', ['GET', 'POST']); + return res.status(405).json({ error: 'Method not allowed' }); + } + + const session = await getServerSession(req, res, authOptions); + if (!session || session.user?.role !== 'Admin') { + return res.status(403).json({ error: 'Forbidden: Access denied.' }); + } + + try { + // 1. Fetch Data (reusing lib/db.js functions) + const [ + totalLeadsResult, + topQuestionsResult, + weeklyLeadsTrendResult, + metaAdsSummaryResult, + metaAdsCampaignDataResult, + ] = await Promise.all([ + getTotalLeadsCount(), + getTopQuestions(5), + getWeeklyLeadsTrend(12), + getMetaAdsSummary(), + getAllMetaAdsCampaignData(), + ]); + + // 2. Create PDF Document + const pdfDoc = await PDFDocument.create(); + const page = pdfDoc.addPage(PageSizes.A4); + const { width, height } = page.getSize(); + const font = await pdfDoc.embedFont(StandardFonts.Helvetica); + const boldFont = await pdfDoc.embedFont(StandardFonts.HelveticaBold); + let yPosition = height - 50; // Start from top + + // Title + page.drawText('Comprehensive Report', { x: 50, y: yPosition, font: boldFont, size: 24 }); + yPosition -= 40; + + // Summary Section + page.drawText('Summary Statistics', { x: 50, y: yPosition, font: boldFont, size: 18 }); + yPosition -= 25; + drawText(page, `Total Leads: ${totalLeadsResult.count || 0}`, 60, yPosition, font, 12); + yPosition -= 20; + drawText(page, `Total Meta Ad Spend: $${parseFloat(metaAdsSummaryResult.data?.totalSpend || 0).toLocaleString()}`, 60, yPosition, font, 12); + yPosition -= 20; + drawText(page, `Total Meta Ad Leads: ${metaAdsSummaryResult.data?.totalLeads || 0}`, 60, yPosition, font, 12); + yPosition -= 30; + + // Top Questions + page.drawText('Top Questions:', { x: 50, y: yPosition, font: boldFont, size: 16 }); + yPosition -= 20; + (topQuestionsResult.data || []).forEach((q, index) => { + if (yPosition < 70) { // Add new page if space is running out + page = pdfDoc.addPage(PageSizes.A4); yPosition = height - 50; + } + drawText(page, `${index + 1}. ${q.question.substring(0,80)}${q.question.length > 80 ? '...' 
: ''} (${q.count} times)`, 60, yPosition, font, 10); + yPosition -= 15; + }); + yPosition -= 20; + + // Weekly Leads Trend (Textual/Table Representation) + if (yPosition < 150) { page = pdfDoc.addPage(PageSizes.A4); yPosition = height - 50; } + page.drawText('Weekly Leads Trend (Last 12 Weeks)', { x: 50, y: yPosition, font: boldFont, size: 16 }); + yPosition -= 20; + const weeklyLeadsTableData = (weeklyLeadsTrendResult.data || []).map(item => ({ + 'Week Start': new Date(item.week_start_date).toLocaleDateString(), + 'Leads': item.count + })); + yPosition = await drawTable(page, weeklyLeadsTableData, 50, yPosition, [150, 100], 20, font, 10); + yPosition -= 20; + + + // Meta Ads Campaign Data Table + if (yPosition < 200) { page = pdfDoc.addPage(PageSizes.A4); yPosition = height - 50; } + page.drawText('Meta Ads Campaign Details', { x: 50, y: yPosition, font: boldFont, size: 16 }); + yPosition -= 20; + const campaignTableData = (metaAdsCampaignDataResult.data || []).map(ad => ({ + Campaign: ad.campaign_name, + AdSet: ad.ad_set_name, + // Ad: ad.ad_name, // Can make table too wide + Date: new Date(ad.date).toLocaleDateString(), + Spend: `$${parseFloat(ad.spend || 0).toFixed(2)}`, + Leads: ad.leads || 0, + CTR: `${(ad.ctr * 100).toFixed(2)}%`, + CPL: `$${parseFloat(ad.cpl || 0).toFixed(2)}`, + })).slice(0, 15); // Limit rows for PDF space for now + yPosition = await drawTable(page, campaignTableData, 50, yPosition, [100, 100, 60, 60, 50, 50, 60], 20, font, 8); + yPosition -= 20; + + // Meta Ads Performance Chart (Textual Summary) + if (yPosition < 100) { page = pdfDoc.addPage(PageSizes.A4); yPosition = height - 50; } + page.drawText('Meta Ads Performance Summary (Aggregated by Campaign)', { x: 50, y: yPosition, font: boldFont, size: 16 }); + yPosition -= 20; + const aggregatedAdsData = (metaAdsCampaignDataResult.data || []).reduce((acc, item) => { + const campaign = item.campaign_name || 'Unknown Campaign'; + if (!acc[campaign]) acc[campaign] = { name: campaign, spend: 0, leads: 0 }; + acc[campaign].spend += item.spend || 0; + acc[campaign].leads += item.leads || 0; + return acc; + }, {}); + const adsPerformanceTableData = Object.values(aggregatedAdsData).map(item => ({ + Campaign: item.name, + Spend: `$${item.spend.toFixed(2)}`, + Leads: item.leads + })); + yPosition = await drawTable(page, adsPerformanceTableData, 50, yPosition, [200, 100, 100], 20, font, 10); + + + // 3. 
Serialize PDF and Send Response + const pdfBytes = await pdfDoc.save(); + res.setHeader('Content-Type', 'application/pdf'); + res.setHeader('Content-Disposition', `attachment; filename="report_${new Date().toISOString().split('T')[0]}.pdf"`); + res.status(200).send(Buffer.from(pdfBytes)); // Send as Buffer + + } catch (error) { + console.error('Failed to generate PDF report:', error); + res.status(500).json({ error: 'Failed to generate PDF report', details: error.message }); + } +} diff --git a/pages/api/public-chat.js b/pages/api/public-chat.js new file mode 100644 index 000000000..9d85447e5 --- /dev/null +++ b/pages/api/public-chat.js @@ -0,0 +1,137 @@ +import { OpenAIEmbeddings } from '@langchain/openai'; +import { OpenAI } from '@langchain/openai'; +import { getPineconeClient } from '../../lib/vector'; // Using our Pinecone client wrapper +import { supabase } from '../../lib/db'; // Direct Supabase client +import { logLead } from '../../lib/db'; +import { PINECONE_INDEX_NAME, PINECONE_NAME_SPACE as DEFAULT_PINECONE_NAMESPACE } from '../../config/pinecone'; + +const PINECONE_NAMESPACE_PRODUCTS = process.env.PINECONE_NAMESPACE_PRODUCTS || DEFAULT_PINECONE_NAMESPACE || 'products-namespace'; + +const embeddings = new OpenAIEmbeddings({ + openAIApiKey: process.env.OPENAI_API_KEY, + modelName: "text-embedding-ada-002", +}); + +const llm = new OpenAI({ + openAIApiKey: process.env.OPENAI_API_KEY, + modelName: 'gpt-3.5-turbo', + temperature: 0.6, // Slightly lower temp for more factual public answers +}); + +export default async function handler(req, res) { + if (req.method !== 'POST') { + res.setHeader('Allow', 'POST'); + return res.status(405).json({ error: 'Method not allowed' }); + } + + const { question } = req.body; + + if (!question) { + return res.status(400).json({ message: 'No question in the request' }); + } + + const sanitizedQuestion = question.trim().replaceAll('\n', ' '); + + try { + const questionEmbedding = await embeddings.embedQuery(sanitizedQuestion); + if (!questionEmbedding) { + throw new Error('Failed to generate question embedding.'); + } + + const pineconeClient = await getPineconeClient(); + const index = pineconeClient.Index(PINECONE_INDEX_NAME); + + const queryResponse = await index.namespace(PINECONE_NAMESPACE_PRODUCTS).query({ + topK: 3, // Fewer results for public widget to keep context concise + vector: questionEmbedding, + includeMetadata: true, + }); + + const productIds = queryResponse.matches + ?.map((match) => match.metadata?.productId) + .filter(id => !!id); + + let productContext = ''; + let retrievedProductDetails = []; // For constructing links + + if (productIds && productIds.length > 0) { + const { data: products, error: dbError } = await supabase + .from('products') + .select('id, name, description, claims') // Select fields relevant for context and links + .in('id', productIds); + + if (dbError) { + console.error('Supabase error fetching products for public chat:', dbError); + } else if (products && products.length > 0) { + retrievedProductDetails = products.map(p => ({id: p.id, name: p.name})); // Store for link generation + productContext = "\n\nRelevant Product Information:\n"; + products.forEach((product, idx) => { + productContext += `\nProduct ${idx + 1} (ID: ${product.id}):\n`; + productContext += ` Name: ${product.name}\n`; + if (product.description) productContext += ` Description: ${product.description.substring(0, 150)}...\n`; // Shorter description + if (product.claims && Array.isArray(product.claims) && product.claims.length > 0) { + 
const claimsText = product.claims.map(c => typeof c === 'string' ? c : c.text).slice(0,2).join(', '); // Fewer claims + if(claimsText) productContext += ` Some Claims: ${claimsText}\n`; + } + }); + } + } + if (!productContext) { + productContext = "\n\nNo specific product information was found for this query. Please answer based on general knowledge or state that product details are not available."; + } + + // Construct product links string if products were retrieved + let productLinksHint = ''; + if (retrievedProductDetails.length > 0) { + productLinksHint = "If relevant, you can point the user to the following product pages for more details:\n"; + retrievedProductDetails.forEach(p => { + // Format as markdown link: [Product Name](/products/ID) + productLinksHint += `- [${p.name}](/products/${p.id})\n`; + }); + } + + + const prompt = `You are a public-facing product assistant for our company. Be helpful, polite, and concise. +Your primary goal is to answer questions about our products and guide users to find more information. +Use the provided product information to answer the user's question. +${productLinksHint} +If the user asks where to buy a product, you can mention that details are often on the product page or they can check with our retail partners (you don't have specific retailer information). +Do not make up information. If the context is insufficient, say that you cannot find the specific detail. + +Product Information Context: +${productContext} + +User's Question: +${sanitizedQuestion} + +Answer:`; + + const llmResponse = await llm.invoke(prompt); + const answer = typeof llmResponse === 'string' ? llmResponse : JSON.stringify(llmResponse); + + try { + await logLead({ + question: sanitizedQuestion, + response: answer, + source: 'public-widget', // user_id will be null by default in logLead if not provided + }); + } catch (logError) { + console.error('Failed to log public lead:', logError); + } + + res.status(200).json({ text: answer }); + + } catch (error) { + console.error('Error in /api/public-chat handler:', error); + try { + await logLead({ + question: sanitizedQuestion, + response: `Error: ${error.message || 'Something went wrong'}`, + source: 'public-widget-error', + }); + } catch (logError) { + console.error('Failed to log public error lead:', logError); + } + res.status(500).json({ error: error.message || 'Something went wrong' }); + } +} diff --git a/pages/api/reports-data.js b/pages/api/reports-data.js new file mode 100644 index 000000000..f870fc0a6 --- /dev/null +++ b/pages/api/reports-data.js @@ -0,0 +1,76 @@ +import { getServerSession } from 'next-auth/next'; +import { authOptions } from './auth/[...nextauth]'; // Adjust path as needed +import { + getTotalLeadsCount, + getTopQuestions, + getWeeklyLeadsTrend, + getMetaAdsSummary, + getAllMetaAdsCampaignData, +} from '../../lib/db'; // Adjust path as needed + +export default async function handler(req, res) { + if (req.method !== 'GET') { + res.setHeader('Allow', 'GET'); + return res.status(405).json({ error: 'Method not allowed' }); + } + + const session = await getServerSession(req, res, authOptions); + if (!session || session.user?.role !== 'Admin') { + return res.status(403).json({ error: 'Forbidden: Access denied.' 
}); + } + + try { + // Parallel fetching of all data points + const [ + totalLeadsResult, + topQuestionsResult, + weeklyLeadsTrendResult, + metaAdsSummaryResult, + metaAdsCampaignDataResult, + ] = await Promise.all([ + getTotalLeadsCount(), + getTopQuestions(5), // Get top 5 questions + getWeeklyLeadsTrend(12), // Get trend for last 12 weeks + getMetaAdsSummary(), + getAllMetaAdsCampaignData(), + ]); + + // Check for errors in each result and handle them appropriately + // For simplicity, just logging errors here and sending what we have or an error status + if (totalLeadsResult.error) console.error("Error fetching total leads:", totalLeadsResult.error); + if (topQuestionsResult.error) console.error("Error fetching top questions:", topQuestionsResult.error); + if (weeklyLeadsTrendResult.error) console.error("Error fetching weekly leads trend:", weeklyLeadsTrendResult.error); + if (metaAdsSummaryResult.error) console.error("Error fetching meta ads summary:", metaAdsSummaryResult.error); + if (metaAdsCampaignDataResult.error) console.error("Error fetching meta ads campaign data:", metaAdsCampaignDataResult.error); + + // Consolidate data into a single response object + const reportsData = { + summaryCards: { + totalLeads: totalLeadsResult.count || 0, + topQuestions: topQuestionsResult.data || [], + // You could add more summary data from metaAdsSummaryResult here if needed + totalMetaSpend: metaAdsSummaryResult.data?.totalSpend || 0, + totalMetaLeads: metaAdsSummaryResult.data?.totalLeads || 0, + }, + charts: { + weeklyLeadsTrend: weeklyLeadsTrendResult.data || [], + // Potentially transform metaAdsSummaryResult or parts of metaAdsCampaignDataResult for charts + // e.g., spend vs leads from metaAdsSummaryResult (though it's a single summary now) + // or aggregate metaAdsCampaignDataResult by campaign for a bar chart. + // For now, we'll pass the raw campaign data and let client decide on chart representation + metaAdsCampaignData: metaAdsCampaignDataResult.data || [], + }, + tables: { + metaAdsCampaigns: metaAdsCampaignDataResult.data || [], + }, + // Include the full summary for Meta Ads if needed separately + metaAdsOverallSummary: metaAdsSummaryResult.data || {}, + }; + + res.status(200).json(reportsData); + + } catch (error) { + console.error('Failed to fetch reports data:', error); + res.status(500).json({ error: 'Failed to fetch reports data', details: error.message }); + } +} diff --git a/pages/api/send-weekly-report.js b/pages/api/send-weekly-report.js new file mode 100644 index 000000000..a66318a66 --- /dev/null +++ b/pages/api/send-weekly-report.js @@ -0,0 +1,209 @@ +// Vercel Cron Job Configuration (example for vercel.json): +// { +// "crons": [ +// { +// "path": "/api/send-weekly-report?secret=YOUR_ACTUAL_SECRET_TOKEN", // Replace YOUR_ACTUAL_SECRET_TOKEN +// "schedule": "0 12 * * 1" // Monday 08:00 AM UTC-4 (e.g., New York EDT) is 12:00 PM UTC. +// // Vercel cron schedules are in UTC. +// } +// ] +// } +// Note: The secret should be stored as an environment variable (e.g., AUTOMATED_REPORT_SECRET) +// and compared, not hardcoded in the path in vercel.json if using Authorization header. +// If using query param like above, ensure it's a strong, unique secret. 
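// Sketch of the header-based variant mentioned above (assumptions: the cron caller can send an
// Authorization header, and AUTOMATED_REPORT_SECRET is set in the environment):
// vercel.json: { "crons": [ { "path": "/api/send-weekly-report", "schedule": "0 12 * * 1" } ] }
// handler check:
//   const ok = req.headers.authorization === `Bearer ${process.env.AUTOMATED_REPORT_SECRET}`;
//   if (!ok) return res.status(401).json({ error: 'Unauthorized' });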
+ +import { + // For fetching data - adapt if specific date ranges are needed for weekly reports + getTotalLeadsCount, // Might need a version for "last week" + getTopQuestions, // Might need a version for "last week" + getWeeklyLeadsTrend, // This is already weekly, but might need to specify the exact week + getMetaAdsSummary, // Might need a version for "last week" + getAllMetaAdsCampaignData, // Might need a version for "last week" +} from '../../lib/db'; // Adjust path as needed +import { PDFDocument, rgb, StandardFonts, PageSizes } from 'pdf-lib'; // Reusing PDF generation elements +import { sendEmailWithAttachment } from '../../lib/email'; +import { sendTelegramReport } from '../../lib/telegram'; + +// TODO: Refactor PDF generation logic from generate-report-pdf.js into a reusable function +// For now, some parts might be duplicated or simplified for this automated report. + +// Helper function to get date range for the last week +function getLastWeekDateRange() { + const today = new Date(); + const endDate = new Date(today); + endDate.setDate(today.getDate() - today.getDay()); // End of last week (Sunday) + const startDate = new Date(endDate); + startDate.setDate(endDate.getDate() - 6); // Start of last week (Monday) + + const options = { year: 'numeric', month: 'short', day: 'numeric' }; + return { + start: startDate.toLocaleDateString('en-US', options), + end: endDate.toLocaleDateString('en-US', options), + startDateISO: startDate.toISOString().split('T')[0], + endDateISO: endDate.toISOString().split('T')[0], + }; +} + + +async function generateWeeklyReportPDF(reportData, dateRange) { + const pdfDoc = await PDFDocument.create(); + const page = pdfDoc.addPage(PageSizes.A4); + const { width, height } = page.getSize(); + const font = await pdfDoc.embedFont(StandardFonts.Helvetica); + const boldFont = await pdfDoc.embedFont(StandardFonts.HelveticaBold); + let yPosition = height - 50; + + page.drawText(`Weekly Summary Report: ${dateRange.start} - ${dateRange.end}`, { x: 50, y: yPosition, font: boldFont, size: 18 }); + yPosition -= 30; + + // Simplified content for automated report + page.drawText('Key Metrics (Last Week):', { x: 50, y: yPosition, font: boldFont, size: 14 }); + yPosition -= 20; + + // Note: reportData needs to be data specific to the last week. + // The functions from lib/db might need to be adapted or new ones created for date-ranged queries. + // For this example, we'll assume reportData contains appropriately filtered data. + + page.drawText(`- Total Leads: ${reportData.summaryCards?.totalLeads_lastWeek || 'N/A'}`, { x: 60, y: yPosition, font, size: 10 }); + yPosition -= 15; + page.drawText(`- Meta Ad Spend: $${parseFloat(reportData.metaAdsOverallSummary?.totalSpend_lastWeek || 0).toLocaleString()}`, { x: 60, y: yPosition, font, size: 10 }); + yPosition -= 15; + page.drawText(`- Meta Ad Leads: ${reportData.metaAdsOverallSummary?.totalLeads_lastWeek || 'N/A'}`, { x: 60, y: yPosition, font, size: 10 }); + yPosition -= 25; + + page.drawText('Top Questions (Last Week):', { x: 50, y: yPosition, font: boldFont, size: 14 }); + yPosition -= 20; + (reportData.summaryCards?.topQuestions_lastWeek || []).slice(0,3).forEach((q, index) => { + if (yPosition < 70) { page = pdfDoc.addPage(PageSizes.A4); yPosition = height - 50; } + page.drawText(`${index + 1}. ${q.question.substring(0,70)}... 
(${q.count} times)`, { x: 60, y: yPosition, font, size: 9 }); + yPosition -= 15; + }); + + // Add more sections as needed (e.g., simplified tables for campaign data) + // For brevity, this example PDF is very simple. + + return await pdfDoc.save(); // Returns Uint8Array +} + + +export default async function handler(req, res) { + // 1. Security Check + const providedSecret = req.query.secret || req.headers.authorization?.split(' ')[1]; + if (providedSecret !== process.env.AUTOMATED_REPORT_SECRET) { + console.warn('Unauthorized attempt to access send-weekly-report API.'); + return res.status(401).json({ error: 'Unauthorized' }); + } + + try { + console.log("Starting weekly report generation..."); + const dateRange = getLastWeekDateRange(); + + // 2. Fetch Report Data (for the last week - functions might need adjustment) + // This is a placeholder; actual data fetching needs to be specific to the date range. + // For now, we'll fetch general data and just use the date range in the title. + // TODO: Adapt DB functions to accept date ranges for more accurate weekly reports. (Done for some) + const reportDateOptions = { + startDateISO: dateRange.startDateISO, + endDateISO: dateRange.endDateISO + }; + + const [ + totalLeadsLastWeekResult, + topQuestionsLastWeekResult, + // weeklyLeadsTrendResult, // For a single week PDF, this specific trend might be less relevant than a summary. + // Or, fetch for a longer period to show context. For now, simplifying. + metaAdsSummaryLastWeekResult, + // metaAdsCampaignDataResult, // Full campaign data might be too verbose for a summary email PDF. + // A summary or top N campaigns might be better. + ] = await Promise.all([ + getTotalLeadsCount(reportDateOptions), + getTopQuestions(3, reportDateOptions), // Top 3 questions from last week + // getWeeklyLeadsTrend(12), // Example: show 12 weeks trend for context in report + getMetaAdsSummary(reportDateOptions), + ]); + + const reportDataForPDF = { + summaryCards: { + totalLeads_lastWeek: totalLeadsLastWeekResult.count, + topQuestions_lastWeek: topQuestionsLastWeekResult.data, + }, + metaAdsOverallSummary: { // This now correctly reflects last week's summary + totalSpend_lastWeek: metaAdsSummaryLastWeekResult.data?.totalSpend, + totalLeads_lastWeek: metaAdsSummaryLastWeekResult.data?.totalLeads, + avgCPL_lastWeek: metaAdsSummaryLastWeekResult.data?.avgCPL, + avgCTR_lastWeek: metaAdsSummaryLastWeekResult.data?.avgCTR, + }, + // weeklyLeadsTrendForChart: weeklyLeadsTrendResult.data, // If we decide to include a trend chart + }; + + + // 3. Generate PDF + console.log("Generating PDF report..."); + const pdfBytes = await generateWeeklyReportPDF(reportDataForPDF, dateRange); + const pdfBase64 = Buffer.from(pdfBytes).toString('base64'); // For SendGrid attachment + + const emailSubject = `Weekly Performance Report: ${dateRange.start} - ${dateRange.end}`; + const emailHtmlBody = ` +

+      <p>Please find attached the Weekly Performance Report for ${dateRange.start} - ${dateRange.end}.</p>
+      <h3>Key Metrics for Last Week (${dateRange.start} - ${dateRange.end}):</h3>
+      <ul>
+        <li>Total Leads Generated: ${reportDataForPDF.summaryCards.totalLeads_lastWeek || 0}</li>
+        <li>Meta Ad Spend: $${parseFloat(reportDataForPDF.metaAdsOverallSummary.totalSpend_lastWeek || 0).toLocaleString()}</li>
+        <li>Meta Ad Leads: ${reportDataForPDF.metaAdsOverallSummary.totalLeads_lastWeek || 0}</li>
+        <li>Meta Ad Avg CPL: $${parseFloat(reportDataForPDF.metaAdsOverallSummary.avgCPL_lastWeek || 0).toFixed(2)}</li>
+      </ul>
+      <p>Further details, including top questions from the period, are in the attached PDF.</p>
+ `; + const reportRecipientEmail = process.env.REPORT_RECIPIENT_EMAIL; + const telegramChatId = process.env.TELEGRAM_REPORT_CHAT_ID; + + // 4. Send Email via SendGrid + if (reportRecipientEmail && process.env.SENDGRID_API_KEY && process.env.SENDGRID_FROM_EMAIL) { + try { + console.log(`Sending email report to ${reportRecipientEmail}...`); + await sendEmailWithAttachment({ + to: reportRecipientEmail, + subject: emailSubject, + html: emailHtmlBody, + attachments: [{ + content: pdfBase64, + filename: `weekly_report_${dateRange.startDateISO}_to_${dateRange.endDateISO}.pdf`, + type: 'application/pdf', + disposition: 'attachment', + }], + }); + console.log("Email report sent successfully."); + } catch (emailError) { + console.error("Failed to send email report:", emailError); + // Continue to Telegram, don't let email failure stop Telegram + } + } else { + console.warn("SendGrid (API Key, From Email, or Recipient Email) not fully configured. Skipping email report."); + } + + // 5. Send Report via Telegram + if (telegramChatId && process.env.TELEGRAM_BOT_TOKEN) { + try { + console.log(`Sending Telegram report to chat ID ${telegramChatId}...`); + const caption = `Weekly Report: ${dateRange.start} - ${dateRange.end}\nTotal Leads (last week): ${reportDataForPDF.summaryCards.totalLeads_lastWeek || 0}\nMeta Ad Spend (last week): $${parseFloat(reportDataForPDF.metaAdsOverallSummary.totalSpend_lastWeek || 0).toLocaleString()}`; + await sendTelegramReport(telegramChatId, Buffer.from(pdfBytes), caption, `weekly_report_${dateRange.startDateISO}_to_${dateRange.endDateISO}.pdf`); + console.log("Telegram report sent successfully."); + } catch (telegramError) { + console.error("Failed to send Telegram report:", telegramError); + } + } else { + console.warn("Telegram Bot Token or Chat ID not configured. Skipping Telegram report."); + } + + console.log("Weekly report process completed."); + return res.status(200).json({ + message: 'Weekly report generation and delivery process completed (check logs for individual successes/failures).', + dateRange, + dataSummary: reportDataForPDF // For quick check during development + }); + + } catch (error) { + console.error('Failed to generate or send weekly report:', error); + return res.status(500).json({ error: 'Failed to process weekly report', details: error.message }); + } +} diff --git a/pages/api/settings.js b/pages/api/settings.js new file mode 100644 index 000000000..ea55236fb --- /dev/null +++ b/pages/api/settings.js @@ -0,0 +1,91 @@ +import { getServerSession } from 'next-auth/next'; +import authOptions from './auth/[...nextauth]'; // Ensure this path is correct +import { getAllAppSettings, updateAppSettings } from '../../lib/db'; + +// Helper function to check admin role from session +const isAdmin = (session) => { + return session && session.user && session.user.role === 'Admin'; +}; + +export default async function handler(req, res) { + const session = await getServerSession(req, res, authOptions); + + if (req.method === 'GET') { + // For GET, we can decide if all users can see settings or only admins + // For now, let's assume only admins can see all DB settings, + // but we will also add environment variable statuses which might be less sensitive. + // if (!isAdmin(session)) { + // return res.status(403).json({ message: 'Forbidden: You do not have permission to view settings.' 
}); + // } + + let dbSettings = {}; + try { + // Fetch non-sensitive settings from DB if needed by non-admins, or all by admins + // For this example, only admins will fetch from DB for simplicity in this step + if (isAdmin(session)) { + const { data, error } = await getAllAppSettings(); + if (error) { + throw new Error(`Failed to fetch database settings: ${error.message}`); + } + dbSettings = data || {}; + } + } catch (error) { + console.error('Error fetching settings:', error); + // Don't return DB error details to client if not admin or if it's sensitive + if (isAdmin(session)) { + return res.status(500).json({ message: error.message || 'Failed to fetch settings.' }); + } + // For non-admins, we might only return env var statuses and not error out here + } + + // Status of environment-variable-based API keys + const apiKeyStatuses = { + OPENAI_API_KEY: process.env.OPENAI_API_KEY ? 'Configured' : 'Not Set', + PINECONE_API_KEY: process.env.PINECONE_API_KEY ? 'Configured' : 'Not Set', + PINECONE_ENVIRONMENT: process.env.PINECONE_ENVIRONMENT ? 'Configured' : 'Not Set', + PINECONE_INDEX_NAME: process.env.PINECONE_INDEX_NAME ? 'Configured' : 'Not Set', + // Add other keys as needed + SENDGRID_API_KEY: process.env.SENDGRID_API_KEY ? 'Configured' : 'Not Set', + TELEGRAM_BOT_TOKEN: process.env.TELEGRAM_BOT_TOKEN ? 'Configured' : 'Not Set', + SUPABASE_URL: process.env.SUPABASE_URL ? 'Configured' : 'Not Set', // For info + }; + + // Combine DB settings (for admins) and API key statuses + const responseSettings = isAdmin(session) + ? { ...dbSettings, apiKeyStatuses } + : { apiKeyStatuses }; // Non-admins only see API key statuses + + return res.status(200).json(responseSettings); + + } else if (req.method === 'POST') { + if (!isAdmin(session)) { + return res.status(403).json({ message: 'Forbidden: You do not have permission to update settings.' }); + } + + const { settingsToUpdate } = req.body; // Expects an object like { settingKey: 'newValue', ... } + + if (!settingsToUpdate || typeof settingsToUpdate !== 'object' || Object.keys(settingsToUpdate).length === 0) { + return res.status(400).json({ message: 'No settings provided to update or invalid format.' }); + } + + // Filter out apiKeyStatuses from being saved to DB if they are accidentally sent + const filteredSettings = { ...settingsToUpdate }; + delete filteredSettings.apiKeyStatuses; + // Potentially add more logic here to prevent saving sensitive keys if their actual values are sent + + try { + const { data, error } = await updateAppSettings(filteredSettings); + if (error) { + throw new Error(`Failed to update settings: ${error.message}`); + } + return res.status(200).json({ message: 'Settings updated successfully.', updated: data }); + } catch (error) { + console.error('Error updating settings:', error); + return res.status(500).json({ message: error.message || 'Failed to update settings.' 
}); + } + + } else { + res.setHeader('Allow', ['GET', 'POST']); + return res.status(405).json({ message: `Method ${req.method} Not Allowed` }); + } +} diff --git a/pages/api/upload-csv.js b/pages/api/upload-csv.js new file mode 100644 index 000000000..f44db0254 --- /dev/null +++ b/pages/api/upload-csv.js @@ -0,0 +1,186 @@ +import { IncomingForm } from 'formidable'; +import { parse } from 'csv-parse'; +import { Readable } from 'stream'; +import { upsertProducts, upsertMetaAds } from '../../lib/db'; +import { getProductEmbedding } from '../../lib/openai'; +import { upsertProductVector } from '../../lib/vector'; + +// Disable Next.js body parsing for this route, as formidable will handle it +export const config = { + api: { + bodyParser: false, + }, +}; + +export default async function handler(req, res) { + if (req.method !== 'POST') { + res.setHeader('Allow', 'POST'); + return res.status(405).json({ message: 'Method Not Allowed' }); + } + + const form = new IncomingForm({ + // keepExtensions: true, // Not saving files, so not strictly necessary + // uploadDir: '/tmp', // Not saving files + // formidable options to avoid saving to disk, process in memory + maxFileSize: 100 * 1024 * 1024, // 100MB limit for example + filter: function ({ name, originalFilename, mimetype }) { + // keep only csv files + return mimetype && mimetype.includes('csv'); + } + }); + + try { + const { fields, files } = await new Promise((resolve, reject) => { + form.parse(req, (err, fields, files) => { + if (err) { + console.error('Formidable parsing error:', err); + return reject(err); + } + resolve({ fields, files }); + }); + }); + + const file = files.file?.[0]; // formidable nests files in arrays + const uploadType = fields.type?.[0]; // fields are also in arrays + + if (!file) { + return res.status(400).json({ message: 'No file uploaded.' }); + } + if (!uploadType) { + return res.status(400).json({ message: 'Upload type not specified.' }); + } + + const records = []; + // Create a readable stream from the buffer formidable provides if file is small + // or from the file stream if formidable is configured to stream + const fileStream = Readable.from(file.filepath ? require('fs').createReadStream(file.filepath) : Buffer.from(require('fs').readFileSync(file.path))); // file.path is deprecated, use file.filepath + // Actually, formidable v3 gives a `File` object. Its `filepath` is where it's temporarily stored. + // To avoid disk writes entirely with formidable, you'd need to pipe `part` streams directly. + // For now, this assumes formidable might write a temp file, which is common. + // A more advanced setup for zero disk IO would involve directly piping part streams in formidable's 'part' event. + + const parser = fileStream.pipe( + parse({ + columns: true, // Output rows as objects + skip_empty_lines: true, + trim: true, + // autoParse: true, // Be careful with autoParse, especially for dates and numbers if format is tricky + // cast: true, // More robust casting + cast: (value, context) => { + if (context.header) return value; // Do not cast headers + // Example casting for known numeric fields (adjust based on actual CSV headers) + if (['impressions', 'clicks', 'leads'].includes(context.column)) { + const num = parseInt(value, 10); + return isNaN(num) ? null : num; + } + if (['spend'].includes(context.column)) { + const num = parseFloat(value); + return isNaN(num) ? 
null : num; + } + // Example for dates if not automatically parsed by `date: true` or if specific format + if (context.column === 'date' && value) { + // Basic ISO 8601 date check, adjust if CSV has different format + if (/^\d{4}-\d{2}-\d{2}$/.test(value)) { + return new Date(value); + } + // Try to parse common date formats if needed, or ensure CSV is standardized + const d = new Date(value); + return d instanceof Date && !isNaN(d) ? d : value; + } + // Claims and nutrition_info might be JSON strings in CSV + if (['claims', 'nutrition_info'].includes(context.column) && value) { + try { + return JSON.parse(value); + } catch (e) { + // console.warn(`Failed to parse JSON for column ${context.column}: ${value}`); + return value; // Return as string if not valid JSON + } + } + return value; + }, + }) + ); + + for await (const record of parser) { + records.push(record); + } + + // Clean up temp file if formidable created one + if (file.filepath && require('fs').existsSync(file.filepath)) { + require('fs').unlinkSync(file.filepath); + } + + let result; + let embeddingErrors = []; + if (uploadType === 'products') { + result = await upsertProducts(records); + if (!result.error && result.data && result.data.length > 0) { + console.log(`Successfully upserted ${result.data.length} products to relational DB. Starting embedding generation...`); + // Asynchronously generate embeddings and upsert to vector DB + // For now, sequential processing. For large datasets, consider a job queue. + const embeddingPromises = result.data.map(async (product) => { + try { + // Ensure product object has necessary fields (name, description, claims) + // The product object from upsertProducts should contain the full row. + const embedding = await getProductEmbedding(product); + if (embedding) { + // Product ID from the database (result.data[x].id) is crucial here + await upsertProductVector(product.id, embedding, { + name: product.name, + description: product.description // Add any other metadata you want in Pinecone + }); + } else { + console.warn(`No embedding generated for product ${product.id}, skipping vector DB upsert.`); + embeddingErrors.push({ productId: product.id, error: "Embedding generation returned null." }); + } + } catch (embeddingError) { + console.error(`Failed to generate/upsert embedding for product ${product.id}:`, embeddingError); + embeddingErrors.push({ productId: product.id, error: embeddingError.message }); + } + }); + await Promise.all(embeddingPromises); // Wait for all embedding processes to complete + if (embeddingErrors.length > 0) { + console.warn("Some products had embedding errors:", embeddingErrors); + // Not returning these errors to client for now to keep success message focused on DB insert, + // but this could be changed to include a partial success/warning. + } + } else if (result.error) { + console.error("Error upserting products to relational DB:", result.error); + // Error already handled below + } else { + console.log("No product data returned from upsertProducts or no data to process for embeddings."); + } + } else if (uploadType === 'metaAds') { + result = await upsertMetaAds(records); + } else { + return res.status(400).json({ message: 'Invalid upload type.' 
}); + } + + if (result.error) { + console.error(`Database upsert error for ${uploadType}:`, result.error); + return res.status(500).json({ message: `Database error: ${result.error.message}` }); + } + + let responseMessage = `${uploadType} data uploaded and processed successfully.`; + if (embeddingErrors.length > 0) { + responseMessage += ` Some products had embedding generation/upsert errors: ${embeddingErrors.length} errors. Check server logs.`; + } + + return res.status(200).json({ + message: responseMessage, + processedCount: records.length, + insertedCount: result.data?.length || 0, // This is count from relational DB + embeddingErrors: embeddingErrors, // Optionally send error details to client + }); + + } catch (error) { + console.error('Error in upload-csv handler:', error); + // Clean up temp file in case of error too + // This part is tricky because `file` might not be defined if formidable.parse itself failed early. + // const file = error.field?.file?.[0] || (form.parsedFields?.file?.[0]); // Attempt to get file if possible + // if (file?.filepath && require('fs').existsSync(file.filepath)) { + // require('fs').unlinkSync(file.filepath); + // } + return res.status(500).json({ message: error.message || 'An unknown error occurred during file upload.' }); + } +} diff --git a/pages/index.tsx b/pages/index.tsx index cadf96f6b..dcfe9dac2 100644 --- a/pages/index.tsx +++ b/pages/index.tsx @@ -1,4 +1,5 @@ import { useRef, useState, useEffect } from 'react'; +import { useSession, signIn } from 'next-auth/react'; // Added for auth check import Layout from '@/components/layout'; import styles from '@/styles/Home.module.css'; import { Message } from '@/types/chat'; @@ -6,6 +7,7 @@ import Image from 'next/image'; import ReactMarkdown from 'react-markdown'; import LoadingDots from '@/components/ui/LoadingDots'; import { Document } from 'langchain/document'; +import ProductCard from '@/components/ui/ProductCard'; // Import ProductCard import { Accordion, AccordionContent, @@ -14,6 +16,7 @@ import { } from '@/components/ui/accordion'; export default function Home() { + const { data: session, status } = useSession(); const [query, setQuery] = useState(''); const [loading, setLoading] = useState(false); const [error, setError] = useState(null); @@ -25,11 +28,12 @@ export default function Home() { }>({ messages: [ { - message: 'Hi, what would you like to learn about this document?', + message: "Hi, I'm your product assistant. How can I help you find information today?", type: 'apiMessage', }, ], history: [], + // pendingSourceDocs: undefined, // Ensure this is how it's initially defined if used }); const { messages, history } = messageState; @@ -69,6 +73,7 @@ export default function Home() { setQuery(''); try { + // Changed API endpoint to /api/seller-chat (or keep /api/chat and modify it) const response = await fetch('/api/chat', { method: 'POST', headers: { @@ -93,9 +98,10 @@ export default function Home() { type: 'apiMessage', message: data.text, sourceDocs: data.sourceDocuments, + productData: data.productData, // Store productData in message state }, ], - history: [...state.history, [question, data.text]], + history: [...state.history, [question, data.text]], // History only stores question and text response })); } console.log('messageState', messageState); @@ -120,14 +126,43 @@ export default function Home() { } }; + if (status === 'loading') { + return
 (
+      <Layout>
+        <p>Loading session...</p>
+      </Layout>
+    );
+  }
+
+  if (!session) {
+    return (
+      <Layout>
+        <div>
+          <p>Please sign in to use the Product Assistant.</p>
+          <button onClick={() => signIn()}>Sign in</button>
+        </div>
+      </Layout>
+    );
+  }
+
+  // If user is admin, you could redirect them to /admin or show a different UI
+  // For now, we assume both Admin and other authenticated users use this chat
+  // (see the sketch below).
+  // if (session.user?.role === 'Admin') {
+  //   // router.push('/admin'); // needs useRouter from 'next/router'
+  //   // return null;
+  // }
+
   return (
     <>
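      {/*
        Illustrative sketch (an assumption, not taken from this patch) of the admin
        redirect mentioned in the comment above. It assumes `useRouter` is imported
        from 'next/router'; the hook calls would need to sit near the top of the
        component, before the early returns, to respect the rules of hooks:

        const router = useRouter();
        useEffect(() => {
          if (session?.user?.role === 'Admin') {
            router.push('/admin');
          }
        }, [session, router]);
      */}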
 
-            Chat With Your Docs
+            Product Information Assistant
 
+          {/* Add a comment for future image/file upload here if desired */}
+          {/* */}
 
          {messages.map((message, index) => {
@@ -165,8 +200,8 @@ export default function Home() {
                : styles.usermessage;
            }
            return (
-              <>
-
+
+
{icon}
@@ -174,6 +209,14 @@ export default function Home() {
+                  {/* Render ProductCard components if productData exists */}
+                  {message.type === 'apiMessage' && message.productData && message.productData.length > 0 && (
+                    <div>
+                      {message.productData.map((product, productIndex) => (
+                        <ProductCard key={productIndex} product={product} />
+                      ))}
+                    </div>
+                  )}
                  {message.sourceDocs && (
-                    {message.sourceDocs.map((doc, index) => (
-
+                    {message.sourceDocs.map((doc, docIndex) => (
+
-                            Source {index + 1}
+                            Source {docIndex + 1}
 
                  )}
-
+
            );
          })}
@@ -224,11 +267,12 @@ export default function Home() {
                placeholder={
                  loading
                    ? 'Waiting for response...'
-                    : 'What is this legal case about?'
+                    : 'Ask about products, e.g., "What are the claims for product X?" or "Compare product Y and Z."'
                }
                value={query}
                onChange={(e) => setQuery(e.target.value)}
                className={styles.textarea}
              />
+              {/* Future: Add handler for image/file uploads */}
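              {/*
                Illustrative sketch of the file-upload handler hinted at in the comment
                above (an assumption, not part of this patch): a file input that posts the
                selected file as multipart form data to a hypothetical /api/upload-image
                endpoint.

                <input
                  type="file"
                  accept="image/*,.csv"
                  onChange={async (e) => {
                    const file = e.target.files?.[0];
                    if (!file) return;
                    const body = new FormData();
                    body.append('file', file);
                    await fetch('/api/upload-image', { method: 'POST', body });
                  }}
                />
              */}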
+
+      {/* Summary Cards Section */}
+      <div>
+
+
+
+        {/* Top Questions can be a bit more complex for a simple value card */}
+
+        {summaryCards.topQuestions && summaryCards.topQuestions.length > 0 ? (
+          <ul>
+            {summaryCards.topQuestions.map((q, index) => (
+              <li key={index}>
+                {q.question} ({q.count})
+              </li>
+            ))}
+          </ul>
+        ) : (
+          <p>No question data.</p>
+        )}
+
+      </div>
+
+      {/* Charts Section */}
+      <div>
+        <div>
+          <h3>Weekly Leads Trend</h3>
+          {charts.weeklyLeadsTrend && charts.weeklyLeadsTrend.length > 0 ? (
+            <WeeklyLeadsChart data={charts.weeklyLeadsTrend} />
+          ) : (
+            <p>No weekly leads data available.</p>
+          )}
+        </div>
+        <div>
+          <h3>Meta Ads Performance (Spend vs Leads per Campaign)</h3>
+          {charts.metaAdsCampaignData && charts.metaAdsCampaignData.length > 0 ? (
+            <MetaAdsPerformanceChart data={charts.metaAdsCampaignData} />
+          ) : (
+            <p>No Meta Ads data available for chart.</p>
+          )}
+        </div>
+      </div>
+
+      {/* Table Section */}
+      <div>
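        {/*
          Assumption about the columns rendered below: `ctr` is stored as a fraction
          (clicks / impressions) and `cpl` as spend / leads. If only the raw fields were
          available, they could be derived per row before rendering, e.g.:

          const ctr = ad.impressions ? ad.clicks / ad.impressions : 0; // click-through rate
          const cpl = ad.leads ? ad.spend / ad.leads : 0;              // cost per lead
        */}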
+        <h3>Meta Ads Campaign Details</h3>
+        {tables.metaAdsCampaigns.length > 0 ? (
+          //
+          <table>
+            <thead>
+              <tr>
+                <th>Campaign</th>
+                <th>Ad Set</th>
+                <th>Ad Name</th>
+                <th>Date</th>
+                <th>Spend</th>
+                <th>Impressions</th>
+                <th>Clicks</th>
+                <th>Leads</th>
+                <th>CTR</th>
+                <th>CPL</th>
+              </tr>
+            </thead>
+            <tbody>
+              {tables.metaAdsCampaigns.map((ad, index) => (
+                <tr key={index}>
+                  <td>{ad.campaign_name}</td>
+                  <td>{ad.ad_set_name}</td>
+                  <td>{ad.ad_name}</td>
+                  <td>{new Date(ad.date).toLocaleDateString()}</td>
+                  <td>${parseFloat(ad.spend || 0).toFixed(2)}</td>
+                  <td>{ad.impressions || 0}</td>
+                  <td>{ad.clicks || 0}</td>
+                  <td>{ad.leads || 0}</td>
+                  <td>{((ad.ctr || 0) * 100).toFixed(2)}%</td>
+                  <td>${parseFloat(ad.cpl || 0).toFixed(2)}</td>
+                </tr>
+              ))}
+            </tbody>
+          </table>
+        ) : (
+          <p>No campaign data available.</p>
+        )}
+      </div>
+    </div>
+  );
+}
+
+const ProtectedReportsDashboard = () => {
+  return (
+
+
+
+  );
+};
+
+export default withAdminAuth(ProtectedReportsDashboard);
diff --git a/pages/unauthorized.tsx b/pages/unauthorized.tsx
new file mode 100644
index 000000000..fb56959cf
--- /dev/null
+++ b/pages/unauthorized.tsx
@@ -0,0 +1,16 @@
+import Layout from '../components/layout';
+import Link from 'next/link';
+
+export default function UnauthorizedPage() {
+  return (
+    <Layout>
+      <div>
+        <h1>Access Denied</h1>
+        <p>You do not have the necessary permissions to view this page.</p>
+        <Link href="/">
+          Go to Homepage
+        </Link>
+      </div>
+    </Layout>
+ ); +} diff --git a/pages/widget.jsx b/pages/widget.jsx new file mode 100644 index 000000000..56e344a91 --- /dev/null +++ b/pages/widget.jsx @@ -0,0 +1,151 @@ +import React, { useState, useRef, useEffect } from 'react'; +import ReactMarkdown from 'react-markdown'; +import styles from '@/styles/Widget.module.css'; // Create this CSS module +import LoadingDots from '@/components/ui/LoadingDots'; // Re-use if available and suitable + +// This page will directly implement the widget UI. +// It's structured like a self-contained component. + +export default function WidgetPage() { + const [query, setQuery] = useState(''); + const [loading, setLoading] = useState(false); + const [error, setError] = useState(null); + const [messages, setMessages] = useState([ + { + id: 'init', + type: 'apiMessage', + message: "Hello! I'm a public product assistant. Ask me about our products!", + }, + ]); + + const messageListRef = useRef(null); + const textAreaRef = useRef(null); + + useEffect(() => { + textAreaRef.current?.focus(); + }, []); + + useEffect(() => { + if (messageListRef.current) { + messageListRef.current.scrollTop = messageListRef.current.scrollHeight; + } + }, [messages]); + + async function handleSubmit(e) { + e.preventDefault(); + setError(null); + + if (!query.trim()) { + alert('Please input a question.'); + return; + } + + const question = query.trim(); + const userMessage = { + id: `user-${Date.now()}`, + type: 'userMessage', + message: question, + }; + setMessages(prevMessages => [...prevMessages, userMessage]); + setLoading(true); + setQuery(''); + + try { + const response = await fetch('/api/public-chat', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ question }), // No history sent for simplicity in public widget + }); + const data = await response.json(); + + if (data.error) { + setError(data.error); + const errorMessage = { + id: `api-error-${Date.now()}`, + type: 'apiMessage', + message: `Sorry, an error occurred: ${data.error}`, + }; + setMessages(prevMessages => [...prevMessages, errorMessage]); + } else { + const apiMessage = { + id: `api-${Date.now()}`, + type: 'apiMessage', + message: data.text, // Assuming 'text' is the response field + }; + setMessages(prevMessages => [...prevMessages, apiMessage]); + } + } catch (err) { + setError('An error occurred while fetching data. Please try again.'); + const errorMessage = { + id: `fetch-error-${Date.now()}`, + type: 'apiMessage', + message: 'Sorry, I couldn\'t connect to the server. Please try again later.', + }; + setMessages(prevMessages => [...prevMessages, errorMessage]); + console.error(err); + } finally { + setLoading(false); + textAreaRef.current?.focus(); + } + } + + const handleEnter = (e) => { + if (e.key === 'Enter' && query && !e.shiftKey) { + handleSubmit(e); + } + }; + + return ( +
+    <div>
+      <div>
+        <h3>Public Product Assistant</h3>
+      </div>
+      <div ref={messageListRef}>
+        {messages.map((message) => {
+          const isUser = message.type === 'userMessage';
+          return (
+            <div key={message.id} className={isUser ? styles.userMessage : styles.apiMessage}>
+              <ReactMarkdown>{message.message}</ReactMarkdown>
+            </div>
+          );
+        })}
+        {loading && (
+          <div>
+            <LoadingDots color="#000" />
+          </div>
+        )}
+      </div>
+
+