Coverage for mindsdb/migrations/versions/2021-11-30_17c3d2384711_init.py: 90%
120 statements
« prev ^ index » next — coverage.py v7.13.1, created at 2026-01-21 00:36 +0000
1import datetime
3from alembic.autogenerate import produce_migrations, render, api
4from alembic import context
5from sqlalchemy import UniqueConstraint
6from sqlalchemy.orm import declarative_base
7from sqlalchemy import Column, Integer, String, DateTime, Boolean, Index
9# required for code execution
10from alembic import op # noqa
11import sqlalchemy as sa # noqa
13import mindsdb.interfaces.storage.db # noqa
14from mindsdb.interfaces.storage.db import Json, Array
15from mindsdb.utilities import log
# Module-level logger, using mindsdb's logging wrapper.
logger = log.getLogger(__name__)

# revision identifiers, used by Alembic.
revision = '17c3d2384711'
down_revision = None  # first migration in the chain: no parent revision
branch_labels = None
depends_on = None
25# ========================================== current database state ========================================
class _BaseStub:
    # SQLAlchemy 2.x flag: allow legacy (non-``Mapped[...]``) class-level
    # annotations on the declarative models below.
    __allow_unmapped__ = True


# Declarative base shared by every model in this migration's "current state".
Base = declarative_base(cls=_BaseStub)
34# Source: https://stackoverflow.com/questions/26646362/numpy-array-is-not-json-serializable
class Semaphor(Base):
    """Declarative model of the `semaphor` table as created by this revision."""
    __tablename__ = 'semaphor'

    id = Column(Integer, primary_key=True)
    updated_at = Column(DateTime, default=datetime.datetime.now, onupdate=datetime.datetime.now)
    created_at = Column(DateTime, default=datetime.datetime.now)
    entity_type = Column('entity_type', String)
    entity_id = Column('entity_id', Integer)
    action = Column(String)
    company_id = Column(Integer)
    # NOTE(review): a UniqueConstraint assigned as a plain class attribute is
    # not attached to the table (it would need to be in __table_args__). Left
    # as-is: this frozen migration must keep producing the identical schema.
    uniq_const = UniqueConstraint('entity_type', 'entity_id')
class Datasource(Base):
    """Declarative model of the `datasource` table as created by this revision."""
    __tablename__ = 'datasource'

    id = Column(Integer, primary_key=True)
    updated_at = Column(DateTime, default=datetime.datetime.now, onupdate=datetime.datetime.now)
    created_at = Column(DateTime, default=datetime.datetime.now)
    name = Column(String)
    data = Column(String)  # Including, e.g. the query used to create it and even the connection info when there's no integration associated with it -- A JSON
    creation_info = Column(String)
    analysis = Column(String)  # A JSON
    company_id = Column(Integer)
    mindsdb_version = Column(String)
    datasources_version = Column(String)
    integration_id = Column(Integer)  # plain Integer, no ForeignKey at this revision
class Predictor(Base):
    """Declarative model of the `predictor` table as created by this revision."""
    __tablename__ = 'predictor'

    id = Column(Integer, primary_key=True)
    updated_at = Column(DateTime, default=datetime.datetime.now, onupdate=datetime.datetime.now)
    created_at = Column(DateTime, default=datetime.datetime.now)
    name = Column(String)
    data = Column(Json)  # A JSON -- should be everything returned by `get_model_data`, I think
    to_predict = Column(Array)
    company_id = Column(Integer)
    mindsdb_version = Column(String)
    native_version = Column(String)
    datasource_id = Column(Integer)
    is_custom = Column(Boolean)  # to del
    learn_args = Column(Json)
    update_status = Column(String, default='up_to_date')

    # Nullable columns (all optional at this revision).
    json_ai = Column(Json, nullable=True)
    code = Column(String, nullable=True)
    lightwood_version = Column(String, nullable=True)
    dtype_dict = Column(Json, nullable=True)
class AITable(Base):
    """Declarative model of the `ai_table` table as created by this revision."""
    __tablename__ = 'ai_table'
    id = Column(Integer, primary_key=True)
    updated_at = Column(DateTime, default=datetime.datetime.now, onupdate=datetime.datetime.now)
    created_at = Column(DateTime, default=datetime.datetime.now)
    name = Column(String)
    integration_name = Column(String)
    integration_query = Column(String)
    query_fields = Column(Json)
    predictor_name = Column(String)
    predictor_columns = Column(Json)
    company_id = Column(Integer)
class Log(Base):
    """Declarative model of the `log` table as created by this revision."""
    __tablename__ = 'log'

    id = Column(Integer, primary_key=True)
    created_at = Column(DateTime, default=datetime.datetime.now)
    log_type = Column(String)  # log, info, warning, traceback etc
    source = Column(String)  # file + line
    company_id = Column(Integer)
    payload = Column(String)
    # NOTE(review): this Index names a column "created_at_index" that does not
    # exist ("created_at" was presumably intended), and an Index assigned as a
    # plain class attribute (outside __table_args__) is not attached to the
    # table anyway. Left untouched: this migration is frozen history and must
    # keep generating the same schema.
    created_at_index = Index("some_index", "created_at_index")
class Integration(Base):
    """Declarative model of the `integration` table as created by this revision."""
    __tablename__ = 'integration'
    id = Column(Integer, primary_key=True)
    updated_at = Column(DateTime, default=datetime.datetime.now, onupdate=datetime.datetime.now)
    created_at = Column(DateTime, default=datetime.datetime.now)
    name = Column(String, nullable=False)
    data = Column(Json)
    company_id = Column(Integer)
class Stream(Base):
    """Declarative model of the `stream` table as created by this revision."""
    __tablename__ = 'stream'
    id = Column(Integer, primary_key=True)
    name = Column(String, nullable=False)
    stream_in = Column(String, nullable=False)
    stream_out = Column(String, nullable=False)
    anomaly_stream = Column(String)
    integration = Column(String)
    predictor = Column(String, nullable=False)
    company_id = Column(Integer)
    updated_at = Column(DateTime, default=datetime.datetime.now, onupdate=datetime.datetime.now)
    created_at = Column(DateTime, default=datetime.datetime.now)
    type = Column(String, default='unknown')
    # NOTE(review): a mutable dict as a scalar column default is shared across
    # inserts; a callable (e.g. `default=dict`) is the usual safe form. Left
    # unchanged so this frozen migration keeps producing the identical schema.
    connection_info = Column(Json, default={})
    learning_params = Column(Json, default={})
    learning_threshold = Column(Integer, default=0)
143# ====================================================================================================
def upgrade():
    '''
    First migration.
    Generates a migration script by difference between model and database and executes it
    '''
    migration_context = context.get_context()

    # Diff the declared models (Base.metadata) against the live database.
    migration_script = produce_migrations(migration_context, Base.metadata)

    autogen_context = api.AutogenContext(migration_context, autogenerate=True)

    # Seems to be the only way to apply changes to the database: render the
    # autogenerated operations to Python source, then execute that source.
    template_args = {}
    render._render_python_into_templatevars(autogen_context, migration_script, template_args)

    # Dedent the rendered body one level so it is executable at top level.
    upgrade_code = template_args['upgrades'].replace('\n    ', '\n')
    logger.info('\nPerforming database changes:')
    logger.info(upgrade_code)
    # exec of internally-generated alembic code (not untrusted input); relies
    # on the module-level `op` and `sa` imports being in scope.
    exec(upgrade_code)
def downgrade():
    '''Irreversible: the pre-migration database state is unknown.'''
    raise NotImplementedError()