Comments (16)
here is my core.ts
import { db } from '@/db'
import { getKindeServerSession } from '@kinde-oss/kinde-auth-nextjs/server'
import {
createUploadthing,
type FileRouter,
} from 'uploadthing/next'
import { PDFLoader } from 'langchain/document_loaders/fs/pdf'
import { OpenAIEmbeddings } from 'langchain/embeddings/openai'
import { PineconeStore } from 'langchain/vectorstores/pinecone'
import { getPineconeClient } from '@/lib/pinecone'
import { getUserSubscriptionPlan } from '@/lib/stripe'
import { PLANS } from '@/config/stripe'
const f = createUploadthing()
// Runs before every upload: resolves the current Kinde user and their
// subscription plan. The returned object becomes `metadata` in the
// onUploadComplete callback.
const middleware = async () => {
  const session = getKindeServerSession()
  const user = session.getUser()

  // Reject uploads from unauthenticated visitors.
  if (!user?.id) throw new Error('Unauthorized')

  const subscriptionPlan = await getUserSubscriptionPlan()

  return { subscriptionPlan, userId: user.id }
}
const onUploadComplete = async ({
metadata,
file,
}: {
metadata: Awaited<ReturnType>
file: {
key: string
name: string
url: string
}
}) => {
const isFileExist = await db.file.findFirst({
where: {
key: file.key,
},
})
if (isFileExist) return
const createdFile = await db.file.create({
data: {
key: file.key,
name: file.name,
userId: metadata.userId,
url: https://uploadthing-prod.s3.us-west-2.amazonaws.com/${file.key}
,
uploadStatus: 'PROCESSING',
},
})
try {
const response = await fetch(
https://uploadthing-prod.s3.us-west-2.amazonaws.com/${file.key}
)
const blob = await response.blob()
const loader = new PDFLoader(blob)
const pageLevelDocs = await loader.load()
const pagesAmt = pageLevelDocs.length
const { subscriptionPlan } = metadata
const { isSubscribed } = subscriptionPlan
const isProExceeded =
pagesAmt >
PLANS.find((plan) => plan.name === 'Pro')!.pagesPerPdf
const isFreeExceeded =
pagesAmt >
PLANS.find((plan) => plan.name === 'Free')!
.pagesPerPdf
if (
(isSubscribed && isProExceeded) ||
(!isSubscribed && isFreeExceeded)
) {
await db.file.update({
data: {
uploadStatus: 'FAILED',
},
where: {
id: createdFile.id,
},
})
}
// vectorize and index entire document
const pinecone = await getPineconeClient()
const pineconeIndex = pinecone.Index('chatpdf')
const embeddings = new OpenAIEmbeddings({
openAIApiKey: process.env.OPENAI_API_KEY,
})
await PineconeStore.fromDocuments(
pageLevelDocs,
embeddings,
{
pineconeIndex,
namespace: createdFile.id,
}
)
await db.file.update({
data: {
uploadStatus: 'SUCCESS',
},
where: {
id: createdFile.id,
},
})
} catch (err) {
await db.file.update({
data: {
uploadStatus: 'FAILED',
},
where: {
id: createdFile.id,
},
})
}
}
// Two upload routes that differ only in the file-size cap: 4MB for the
// free plan, 16MB for Pro. Both share the same auth middleware and the
// same post-upload processing.
export const ourFileRouter = {
freePlanUploader: f({ pdf: { maxFileSize: '4MB' } })
.middleware(middleware)
.onUploadComplete(onUploadComplete),
proPlanUploader: f({ pdf: { maxFileSize: '16MB' } })
.middleware(middleware)
.onUploadComplete(onUploadComplete),
} satisfies FileRouter
// Exported type consumed by the typed uploadthing client on the frontend.
export type OurFileRouter = typeof ourFileRouter
from quill.
Found this related error that was caused by not defining the index, seems to me you did that though. Might help anyways: mayooear/gpt4-pdf-chatbot-langchain#40
from quill.
PATH::\src\app\api\message\route.ts
import { db } from '@/db'
import { openai } from '@/lib/openai'
import { getPineconeClient } from '@/lib/pinecone'
import { SendMessageValidator } from '@/lib/validators/SendMessageValidator'
import { getKindeServerSession } from '@kinde-oss/kinde-auth-nextjs/server'
import { OpenAIEmbeddings } from 'langchain/embeddings/openai'
import { PineconeStore } from 'langchain/vectorstores/pinecone'
import { NextRequest } from 'next/server'
import { OpenAIStream, StreamingTextResponse } from 'ai'
// Endpoint for asking a question about a pdf file: persists the user
// message, retrieves the most relevant PDF chunks from Pinecone, and
// streams a gpt-3.5-turbo completion back, saving it on completion.
export const POST = async (req: NextRequest) => {
  const body = await req.json()

  const { getUser } = getKindeServerSession()
  // NOTE(review): in newer Kinde versions getUser() returns a Promise —
  // confirm against the installed @kinde-oss version.
  const user = getUser()

  // FIX: guard BEFORE destructuring — destructuring a null user threw a
  // TypeError instead of returning a clean 401.
  if (!user || !user.id)
    return new Response('Unauthorized', { status: 401 })

  const userId = user.id

  const { fileId, message } = SendMessageValidator.parse(body)

  const file = await db.file.findFirst({
    where: {
      id: fileId,
      userId,
    },
  })

  if (!file)
    return new Response('Not found', { status: 404 })

  await db.message.create({
    data: {
      text: message,
      isUserMessage: true,
      userId,
      fileId,
    },
  })

  // 1: vectorize message
  const embeddings = new OpenAIEmbeddings({
    openAIApiKey: process.env.OPENAI_API_KEY,
  })

  const pinecone = await getPineconeClient() //Must add this line also check this in ...\src\app\api\uploadthing\core.ts
  const pineconeIndex = pinecone.Index('YourIndexName') //Replace this with your pinecone index name

  const vectorStore = await PineconeStore.fromExistingIndex(
    embeddings,
    {
      //@ts-ignore
      pineconeIndex,
      namespace: file.id,
    }
  )

  // Top-4 most similar page chunks for this file.
  const results = await vectorStore.similaritySearch(
    message,
    4
  )

  // FIX: fetch the 6 MOST RECENT messages (desc + take), then reverse
  // back to chronological order. The previous asc/take-6 returned the
  // oldest six messages in the thread.
  const prevMessages = (
    await db.message.findMany({
      where: {
        fileId,
      },
      orderBy: {
        createdAt: 'desc',
      },
      take: 6,
    })
  ).reverse()

  const formattedPrevMessages = prevMessages.map((msg) => ({
    role: msg.isUserMessage
      ? ('user' as const)
      : ('assistant' as const),
    content: msg.text,
  }))

  const response = await openai.chat.completions.create({
    model: 'gpt-3.5-turbo',
    temperature: 0,
    stream: true,
    messages: [
      {
        role: 'system',
        content:
          'Use the following pieces of context (or previous conversaton if needed) to answer the users question in markdown format.',
      },
      {
        role: 'user',
        // FIX: .join('') on the mapped history — interpolating the raw
        // array inserted stray commas between conversation turns.
        content: `Use the following pieces of context (or previous conversaton if needed) to answer the users question in markdown format. \nIf you don't know the answer, just say that you don't know, don't try to make up an answer.
\n----------------\n
PREVIOUS CONVERSATION:
${formattedPrevMessages
  .map((message) => {
    if (message.role === 'user')
      return `User: ${message.content}\n`
    return `Assistant: ${message.content}\n`
  })
  .join('')}
\n----------------\n
CONTEXT:
${results.map((r) => r.pageContent).join('\n\n')}
USER INPUT: ${message}`,
      },
    ],
  })

  const stream = OpenAIStream(response, {
    // Persist the assistant's full reply once streaming finishes.
    async onCompletion(completion) {
      await db.message.create({
        data: {
          text: completion,
          isUserMessage: false,
          fileId,
          userId,
        },
      })
    },
  })

  return new StreamingTextResponse(stream)
}
PATH::\src\lib\pinecone.ts
import { PineconeClient } from '@pinecone-database/pinecone'
// Builds and initializes a Pinecone client for each caller.
export const getPineconeClient = async () => {
  const pinecone = new PineconeClient()

  await pinecone.init({
    apiKey: process.env.PINECONE_API_KEY!,
    environment: 'asia-southeast1-gcp-free', //Replace this with your pinecone environment, Check Pinecone website for this.
  })

  return pinecone
}
from quill.
Maybe because Pinecone "gcp-starter" doesn't support namespace.
from quill.
Maybe because Pinecone "gcp-starter" doesn't support namespace.
Has this been confirmed? @susanta96?
from quill.
Yes.
from quill.
Then what is the fix if it is "gcp-starter" ?
from quill.
@varunagarwal007 To resolve the namespace with gcp-starter
, you need to add a metadata field for the file id to search by instead of namespace.
// uploadthing/core.ts
var pageLevelDocs = await loader.load();
pageLevelDocs = pageLevelDocs.map((doc) => {
doc.metadata = {
...doc.metadata,
fileId: createdFile.id, // map over the docs and add the file id.
};
return doc;
});
const pageAmt = pageLevelDocs.length;
const pinecone = await getPineconeClient();
const pineconeIndex = pinecone.Index("quill-chat");
// message/route.ts
const pineconeIndex = pinecone.Index("quill-chat");
const vectorStore = await PineconeStore.fromExistingIndex(embeddings, {
pineconeIndex,
filter: { fileId }, // filter by the file id
});
const results = await vectorStore.similaritySearch(message, 4);
from quill.
where should i paste this message/route code in the actual file? @willbrandin
from quill.
@BlitZSenpai in /uploadthing/core.ts
you only need to map over page level docs. I have mine on Line 66, but could vary for you.
In /message/route.ts
the code should all be the same as in the @joschan21's video, the only difference is you are replacing namespace with filter: { fileId }
.
from quill.
@willbrandin ye but im getting this error
pinecone.d.ts(8, 5): The expected type comes from property 'pineconeIndex' which is declared here on type 'PineconeLibArgs'
(property) PineconeLibArgs.pineconeIndex: Index<RecordMetadata>```
at /core.ts on the line
```await PineconeStore.fromDocuments(pageLevelDocs, embeddings, {
pineconeIndex,
namespace: createdFile.id,
});```
from quill.
Yes, you are using namespace
which is not on the free plan. You need to use filters.
from quill.
@willbrandin Can you give me a replacement for those 3 lines? I replaced namespace with filters, and the pineconeIndex line still has the same error.
from quill.
`import { db } from "@/db";
import { getKindeServerSession } from "@kinde-oss/kinde-auth-nextjs/server";
import { createUploadthing, type FileRouter } from "uploadthing/next";
import { PDFLoader } from "langchain/document_loaders/fs/pdf";
import { OpenAIEmbeddings } from "langchain/embeddings/openai";
import { PineconeStore } from "langchain/vectorstores/pinecone";
import { getPineconeClient } from "@/lib/pinecone";
const f = createUploadthing();
// Single upload route (4MB PDFs): records the file, downloads and
// splits the PDF, and indexes every page in Pinecone tagged with the
// file id so queries can filter by metadata instead of namespace.
export const ourFileRouter = {
  pdfUploader: f({ pdf: { maxFileSize: "4MB" } })
    .middleware(async ({ req }) => {
      const { getUser } = getKindeServerSession();
      const user = await getUser();

      if (!user || !user.id) throw new Error("Unauthorized");

      return { userId: user.id };
    })
    .onUploadComplete(async ({ metadata, file }) => {
      const createdFile = await db.file.create({
        data: {
          key: file.key,
          name: file.name,
          userId: metadata.userId,
          url: `https://uploadthing-prod.s3.us-west-2.amazonaws.com/${file.key}`,
          uploadStatus: "PROCESSING",
        },
      });

      try {
        const response = await fetch(
          `https://uploadthing-prod.s3.us-west-2.amazonaws.com/${file.key}`
        );
        const blob = await response.blob();

        const loader = new PDFLoader(blob);
        // One document per page. Tag each page's metadata with the file
        // id so the query side can use `filter: { fileId }` — namespaces
        // are not available on Pinecone's gcp-starter plan.
        const pageLevelDocs = (await loader.load()).map((doc) => {
          doc.metadata = {
            ...doc.metadata,
            fileId: createdFile.id,
          };
          return doc;
        });

        const pinecone = await getPineconeClient();
        const pineconeIndex = pinecone.Index("otto");

        const embeddings = new OpenAIEmbeddings({
          openAIApiKey: process.env.OPENAI_API_KEY,
        });

        // FIX: removed `filters: createdFile.id` — `filters` is not a
        // valid PineconeLibArgs property (the reported type error);
        // filtering belongs on fromExistingIndex at query time.
        await PineconeStore.fromDocuments(pageLevelDocs, embeddings, {
          pineconeIndex,
        });

        await db.file.update({
          data: {
            uploadStatus: "SUCCESS",
          },
          where: {
            id: createdFile.id,
          },
        });
      } catch (err) {
        await db.file.update({
          data: {
            uploadStatus: "FAILED",
          },
          where: {
            id: createdFile.id,
          },
        });
      }
    }),
} satisfies FileRouter;
export type OurFileRouter = typeof ourFileRouter;
`
This is my core.ts and im getting this error "pinecone.d.ts(8, 5): The expected type comes from property 'pineconeIndex' which is declared here on type 'PineconeLibArgs'
(property) PineconeLibArgs.pineconeIndex: Index" at the pineconeIndex line I marked above with a comment.
from quill.
@willbrandin ye but im getting this error
pinecone.d.ts(8, 5): The expected type comes from property 'pineconeIndex' which is declared here on type 'PineconeLibArgs' (property) PineconeLibArgs.pineconeIndex: Index<RecordMetadata>``` at /core.ts on the line ```await PineconeStore.fromDocuments(pageLevelDocs, embeddings, { pineconeIndex, namespace: createdFile.id, });```
i am also getting this error please help someone to solve this
from quill.
why this error occurs
from quill.
Related Issues (20)
- Minor question HOT 2
- connect to pinecone db have some error, how to fix them?thx.
- send Message function not working HOT 2
- Anyone facing any issue or need a upgraded?
- 'user' is possibly 'null'.ts(18047) const user: KindeUser | null HOT 2
- When I try to upgrade my subscription, it doesn't upgrade. HOT 1
- The logged-in user is not showing in Prisma Studio. HOT 2
- ChatWrapper / getFileUploadStatus: Property status does not exist on type HOT 4
- Cannot find module langchain/dist/document_loaders/fs/pdf or its corresponding type declarations HOT 1
- Error [InsufficientQuotaError]: 429 HOT 1
- The logged-in user is not showing in Prisma Studio. im using Neon console HOT 1
- Property 'status' does not exist on type 'Query<{ status: UploadStatus; }, TRPCClientErrorLike<{ input: { fileId: string; }; output: { status: UploadStatus; }; transformer: false; errorShape: DefaultErrorShape; }>, { ...; }, any>'.ts(2339) HOT 3
- Image Source error in image-loader.js HOT 4
- Property 'id' does not exist on type 'KindeUser | null'.ts(2339) on src\app\api\message\route.ts HOT 5
- Object literal may only specify known properties, and 'environment' does not exist in type 'PineconeConfiguration'.ts(2353) HOT 1
- what is the issue why im not able to chat with pdf HOT 8
- i am new which database to connect? (free) HOT 2
- Issue when uploading PDF in dashboard. PDF upload error HOT 3
- Dashboard doesn't get loaded at all after logging in
- Property 'given_name' does not exist on type 'Promise<KindeUser | null> \src\components\Navbar.tsx HOT 1
Recommend Projects
-
React
A declarative, efficient, and flexible JavaScript library for building user interfaces.
-
Vue.js
🖖 Vue.js is a progressive, incrementally-adoptable JavaScript framework for building UI on the web.
-
Typescript
TypeScript is a superset of JavaScript that compiles to clean JavaScript output.
-
TensorFlow
An Open Source Machine Learning Framework for Everyone
-
Django
The Web framework for perfectionists with deadlines.
-
Laravel
A PHP framework for web artisans
-
D3
Bring data to life with SVG, Canvas and HTML. 📊📈🎉
-
Recommend Topics
-
javascript
JavaScript (JS) is a lightweight interpreted programming language with first-class functions.
-
web
Some thing interesting about web. New door for the world.
-
server
A server is a program made to process requests and deliver data to clients.
-
Machine learning
Machine learning is a way of modeling and interpreting data that allows a piece of software to respond intelligently.
-
Visualization
Some thing interesting about visualization, use data art
-
Game
Some thing interesting about game, make everyone happy.
Recommend Org
-
Facebook
We are working to build community through open source technology. NB: members must have two-factor auth.
-
Microsoft
Open source projects and samples from Microsoft.
-
Google
Google ❤️ Open Source for everyone.
-
Alibaba
Alibaba Open Source for everyone
-
D3
Data-Driven Documents codes.
-
Tencent
China tencent open source team.
from quill.