Refactor to use PostgreSQL end to end
This commit is contained in:
parent 8605ee6b2c
commit 20d198e559
5 changed files with 144 additions and 72 deletions
@@ -1,18 +1,36 @@
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, Integer, String, DateTime
from sqlalchemy import Column, String, DateTime, ForeignKey, Index
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import relationship, mapped_column
import uuid

Base = declarative_base()


class Website(Base):
    __tablename__ = 'websites'
    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)


class Documents(Base):
    __tablename__ = 'documents'
    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    url = Column(String)
    text_content = Column(String)
    html_content = Column(String)
    first_crawl_date = Column(DateTime)
    last_crawl_date = Column(DateTime)
    document_tokens = relationship("Document_Tokens", back_populates="document")


class Document_Tokens(Base):
    __tablename__ = 'document_tokens'
    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    document_id = mapped_column(ForeignKey("documents.id"))
    # Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    token_id = mapped_column(ForeignKey("tokens.id"))
    # Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    document = relationship("Documents", back_populates="document_tokens", uselist=False)
    token = relationship("Tokens", back_populates="document_tokens")


class Tokens(Base):
    __tablename__ = 'tokens'
    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    token = Column(String, index=True)
    document_tokens = relationship("Document_Tokens", back_populates="token")
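For context on how these models could be used end to end against PostgreSQL, here is a minimal sketch of creating the schema and inserting one document linked to a token through the Document_Tokens association row. The module name, connection URL, credentials, and database name are illustrative assumptions, not part of this commit.

# Sketch only: assumes a reachable PostgreSQL instance and that the models
# above live in an importable module; names below are placeholders.
from datetime import datetime, timezone

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

# from models import Base, Documents, Tokens, Document_Tokens  # module name is an assumption

# The UUID column type comes from the PostgreSQL dialect, so the schema must
# be created against a PostgreSQL engine (not SQLite). Swap in real credentials.
engine = create_engine("postgresql+psycopg2://user:password@localhost:5432/crawler")
Base.metadata.create_all(engine)

Session = sessionmaker(bind=engine)

with Session() as session:
    doc = Documents(
        url="https://example.com",
        text_content="Example Domain",
        html_content="<html>...</html>",
        first_crawl_date=datetime.now(timezone.utc),
        last_crawl_date=datetime.now(timezone.utc),
    )
    tok = Tokens(token="example")
    # Document_Tokens is the association row linking a document to a token.
    link = Document_Tokens(document=doc, token=tok)
    session.add_all([doc, tok, link])
    session.commit()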